Compare commits


230 Commits

Author SHA1 Message Date
09322c5b80 add booking points to user table (#461) 2025-07-09 14:25:42 +03:00
74d3620d0e Chore/space link tag cleanup (#462)
* chore: remove unused imports and dead code for space link

* chore: remove unused imports and dead code

* chore: remove unused imports and dead code of tag service
2025-07-09 14:25:26 +03:00
83be80d9f6 add order space API (#459) 2025-07-09 11:44:14 +03:00
2589e391ed fix: add unique validation on subspaces in update space dto (#460) 2025-07-09 11:33:04 +03:00
5cf45c30f4 fix: check if device not found (#458) 2025-07-08 16:55:56 +03:00
0bb178ed10 make point nullable (#457) 2025-07-08 14:50:00 +03:00
9971fb953d SP-1812: Task/booking-system/update-api (#456)
* add update bookable spaces API

* add search to get bookable spaces API
2025-07-08 11:52:22 +03:00
7a07f39f16 add communities filter to devices by project API (#455) 2025-07-08 11:25:15 +03:00
18b21d697c SP-1753 Feat/booking system (#454)
* task: add get all bookable spaces API (#453)

* task: add non bookable space API
2025-07-08 09:02:24 +03:00
66391bafd8 SP-1814: Update out-of-date device-virtual-id (#451)
* task: add a function to update device tuya id

* Add Tuya const uuid to device
2025-07-07 10:38:20 +03:00
25599b9fe2 add '%' to search (#452) 2025-07-06 14:14:23 +03:00
807c5b7dd3 add tag uuid to add device API (#450) 2025-07-03 16:23:42 +03:00
60bc03cf79 fix deployed issue 2025-07-03 01:12:23 -06:00
a9eaf44d31 Merge branch 'main' into dev 2025-07-03 00:10:00 -06:00
d232c06ebe Merge pull request #449 from SyncrowIOT/fix-get-schedule-api
fix: adjust category handling for CUR_2 device type in schedule retrieval
2025-07-03 00:03:38 -06:00
5c916ed445 add pr template 2025-07-03 00:01:37 -06:00
b2fb378e52 Merge pull request #410 from SyncrowIOT/SP-1754-be-implement-configure-space
SP-1754-be-implement-configure-space
2025-07-02 02:26:29 -06:00
c5f8f96977 Merge branch 'dev' into SP-1754-be-implement-configure-space 2025-07-02 02:25:43 -06:00
8f9b15f49f fix: adjust category handling for CUR_2 device type in schedule retrieval 2025-06-30 07:12:43 -06:00
0b9eef276e ensure Timer is the category value for CUR2 type (#448) 2025-06-30 15:52:01 +03:00
b3f8b92826 ensure Timer is the category value for CUR2 type (#446) 2025-06-30 15:35:23 +03:00
b9da00aaa6 Merge pull request #447 from SyncrowIOT/fix/integrate-cur2-with-schedule
add cur2 checks to schedule
2025-06-30 06:27:27 -06:00
5bf44a18e1 add cur2 checks to schedule 2025-06-30 14:09:32 +03:00
2b2772e4ca Merge pull request #445 from SyncrowIOT/fix-update-aqi-data-on-staging
fix: filter daily averages by space_id and event_date in update procedure
2025-06-30 04:11:03 -06:00
13c0f87fc6 fix: filter daily averages by space_id and event_date in update procedure 2025-06-30 04:09:40 -06:00
c9d794d988 fix: update role type formatting in user invitation email 2025-06-30 01:25:09 -06:00
5d4e5ca87e Merge pull request #444 from SyncrowIOT/SP-1736-fe-on-user-management-page-when-i-invited-a-user-as-a-space-member-his-role-appeared-as-admin-in-the-email
SP-1736-fe-on-user-management-page-when-i-invited-a-user-as-a-space-member-his-role-appeared-as-admin-in-the-email
2025-06-30 01:23:55 -06:00
f4e748d735 fix: update role type formatting in user invitation email 2025-06-30 00:58:30 -06:00
f4f7999ae0 add device to firebase & stop moving it from the OEM space (#443) 2025-06-30 09:48:16 +03:00
82c82d521c add deviceName to handle password API (#442) 2025-06-30 08:57:43 +03:00
c7a4ff1194 fix: schedule device types (#441) 2025-06-29 15:27:55 +03:00
90ab291d83 add curtain module device (#440) 2025-06-29 10:10:19 +03:00
8a4633b158 Merge pull request #439 from SyncrowIOT/add-check-log-to-trace-the-map-issue
feat: enhance device status handling with caching and batch processin…
2025-06-25 18:59:37 -06:00
f80d097ff8 refactor: optimize log insertion and clean up device cache handling in TuyaWebSocketService 2025-06-25 18:57:56 -06:00
04bd156df1 Merge branch 'dev' into add-check-log-to-trace-the-map-issue 2025-06-25 18:42:43 -06:00
731819aeaa feat: enhance device status handling with caching and batch processing improvements 2025-06-25 18:37:46 -06:00
68d2d3b53d fix: improve device retrieval logic in addDeviceStatusToFirebase method 2025-06-25 08:13:02 -06:00
3fcfe2d92f Merge pull request #438 from SyncrowIOT/temp-fix-to-check
fix: enhance device status handling by integrating device cache for improved performance
2025-06-25 08:06:29 -06:00
c0a069b460 fix: enhance device status handling by integrating device cache for improved performance 2025-06-25 08:03:23 -06:00
5381a949bc task: delete used & its relations (#437) 2025-06-25 15:32:46 +03:00
30724d7d37 Merge pull request #436 from SyncrowIOT/add-check-log-to-trace-the-map-issue
fix: add validation for missing properties in device status logs
2025-06-25 05:32:50 -06:00
324661e1ee fix: add missing check for device UUID in batch processing logs 2025-06-25 05:30:15 -06:00
a83424f45b fix: remove unnecessary validation for missing properties in device status logs 2025-06-25 05:29:28 -06:00
71f6ccb4db fix: add validation for missing properties in device status logs 2025-06-25 05:20:26 -06:00
68692b7c8b increase rate limit to 100 per minute for each IP (#435) 2025-06-25 13:50:38 +03:00
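The commit above only states the new limit, so here is a minimal, purely illustrative sketch of a 100-requests-per-minute per-IP limit, assuming @nestjs/throttler v5+ is the library in use (the repository's actual rate-limit wiring is not shown in this diff).

// Illustrative only: per-IP limit of 100 requests per minute.
// Library choice and option shape are assumptions, not taken from the repo.
import { Module } from '@nestjs/common';
import { APP_GUARD } from '@nestjs/core';
import { ThrottlerGuard, ThrottlerModule } from '@nestjs/throttler';

@Module({
  imports: [
    ThrottlerModule.forRoot([
      {
        ttl: 60_000, // one-minute window, in milliseconds
        limit: 100, // at most 100 requests per client IP within the window
      },
    ]),
  ],
  providers: [{ provide: APP_GUARD, useClass: ThrottlerGuard }],
})
export class RateLimitModule {}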
4d60c1ed54 Merge pull request #434 from SyncrowIOT/fix-time-out-connections-db
Fix-time-out-connections-db
2025-06-25 04:47:59 -06:00
27dbe04299 fix: remove unnecessary comment from ScheduleModule import in scheduler module 2025-06-25 04:47:38 -06:00
9bebcb2f3e feat: implement scheduler for periodic data updates and optimize database procedures
- Added SchedulerModule and SchedulerService to handle hourly data updates for AQI, occupancy, and energy consumption.
- Refactored existing services to remove unused device repository dependencies and streamline procedure execution.
- Updated SQL procedures to use correct parameter indexing.
- Enhanced error handling and logging for scheduled tasks.
- Integrated new repositories for presence sensor and AQI pollutant stats across multiple modules.
- Added NestJS schedule package for task scheduling capabilities.
2025-06-25 03:20:25 -06:00
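The commit body above describes hourly updates driven by the NestJS schedule package. The following is a minimal sketch of that pattern; the service name, procedure names, and use of a raw DataSource query are placeholders, not the project's actual identifiers.

// Minimal sketch of an hourly scheduled task with @nestjs/schedule.
// All names below are assumed for illustration.
import { Injectable, Logger } from '@nestjs/common';
import { Cron, CronExpression } from '@nestjs/schedule';
import { DataSource } from 'typeorm';

@Injectable()
export class AnalyticsSchedulerService {
  private readonly logger = new Logger(AnalyticsSchedulerService.name);

  constructor(private readonly dataSource: DataSource) {}

  @Cron(CronExpression.EVERY_HOUR)
  async runHourlyUpdates(): Promise<void> {
    try {
      // Invoke the database procedures the commit refers to (names assumed).
      await this.dataSource.query('CALL update_aqi_daily_stats()');
      await this.dataSource.query('CALL update_occupancy_duration()');
      await this.dataSource.query('CALL update_energy_consumption()');
    } catch (error) {
      this.logger.error(`Hourly analytics update failed: ${error.message}`);
    }
  }
}

For the @Cron decorator to fire, ScheduleModule.forRoot() would also have to be registered in the importing module.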
43ab0030f0 refactor: clean up unused services and optimize batch processing in DeviceStatusFirebaseService 2025-06-25 03:20:12 -06:00
c48adb73b5 Merge pull request #433 from SyncrowIOT/DATA-adjust-remaining-procedures
DATA-adjust-remaining-procedures
2025-06-25 01:55:12 -06:00
d255e6811e update procedures 2025-06-25 10:47:37 +03:00
e58d2d4831 Test/prevent server block on rate limit (#432) 2025-06-24 14:56:02 +03:00
147cf0b582 Merge pull request #431 from SyncrowIOT/DATA-adjust-procedures
DATA-adjust-procedures
2025-06-24 04:58:09 -06:00
4e6b6f6ac5 adjusted procedures 2025-06-24 13:04:21 +03:00
932a3efd1c Sp 1780 be configure the curtain module device (#424)
* task: add Cur new device configuration
2025-06-24 12:18:46 +03:00
0a1ccad120 add check if not space not found (#430) 2025-06-24 12:18:15 +03:00
f337e6c681 Test/prevent server block on rate limit (#421) 2025-06-24 10:55:38 +03:00
f5bf857071 Merge pull request #429 from SyncrowIOT/add-queue-event-handler
Add queue event handler
2025-06-23 08:13:36 -06:00
d1d4d529a8 Add methods to handle SOS events and device status updates in Firebase and our DB 2025-06-23 08:10:33 -06:00
37b582f521 Merge pull request #428 from SyncrowIOT/add-queue-event-handler
Implement message queue for TuyaWebSocketService and batch processing
2025-06-23 07:35:22 -06:00
cf19f08dca turn on all the updates data points 2025-06-23 07:33:01 -06:00
ff370b2baa Implement message queue for TuyaWebSocketService and batch processing 2025-06-23 07:31:58 -06:00
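The commit above mentions a message queue with batch processing for the Tuya WebSocket handler. One way such buffering can work, shown only as a sketch (types, flush callback, and retry behavior are assumptions):

// Illustrative in-memory queue with periodic batch flush; not the repo's code.
type QueuedMessage = { deviceTuyaUuid: string; status: any; log: any };

export class MessageBatchQueue {
  private buffer: QueuedMessage[] = [];

  constructor(
    private readonly flush: (batch: QueuedMessage[]) => Promise<void>,
    private readonly intervalMs = 1000,
  ) {
    // Drain the buffer on a fixed interval instead of per message.
    setInterval(() => void this.drain(), this.intervalMs);
  }

  enqueue(message: QueuedMessage): void {
    this.buffer.push(message);
  }

  private async drain(): Promise<void> {
    if (this.buffer.length === 0) return;
    const batch = this.buffer.splice(0, this.buffer.length);
    try {
      await this.flush(batch);
    } catch {
      // On failure, requeue the batch so it is retried on the next tick.
      this.buffer.unshift(...batch);
    }
  }
}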
04f64407e1 turn off some update data points 2025-06-23 07:10:47 -06:00
d7eef5d03e Merge pull request #427 from SyncrowIOT/revert-426-SP-1778-be-fix-time-out-connections-in-the-db
Revert "SP-1778-be-fix-time-out-connections-in-the-db"
2025-06-23 07:09:20 -06:00
c8d691b380 tern off data procedure 2025-06-23 07:02:23 -06:00
75d03366c2 Revert "SP-1778-be-fix-time-out-connections-in-the-db" 2025-06-23 06:58:57 -06:00
52cb69cc84 Merge pull request #426 from SyncrowIOT/SP-1778-be-fix-time-out-connections-in-the-db
SP-1778-be-fix-time-out-connections-in-the-db
2025-06-23 06:38:58 -06:00
a6053b3971 refactor: implement query runners for database operations in multiple services 2025-06-23 06:34:53 -06:00
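The query-runner refactor named above follows a standard TypeORM pattern; a generic sketch of it is below. The function and parameter names are placeholders, and the saved value must be an instance of one of the project's entity classes.

// Sketch of the TypeORM query-runner transaction pattern; names are assumed.
import { DataSource } from 'typeorm';

export async function saveWithQueryRunner(
  dataSource: DataSource,
  entity: object, // must be an instance of a TypeORM entity class
): Promise<void> {
  const queryRunner = dataSource.createQueryRunner();
  await queryRunner.connect();
  await queryRunner.startTransaction();
  try {
    await queryRunner.manager.save(entity);
    await queryRunner.commitTransaction();
  } catch (error) {
    await queryRunner.rollbackTransaction();
    throw error;
  } finally {
    // Releasing the runner returns its connection to the pool, which is what
    // helps with the connection time-out issue named in the PR title.
    await queryRunner.release();
  }
}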
60d2c8330b fix: increase DB max pool size (#425) 2025-06-23 15:23:53 +03:00
fddd06e06d fix: add space condition to the join operator instead of general query (#423) 2025-06-23 12:44:19 +03:00
3160773c2a fix: spaces structure in communities (#420) 2025-06-23 10:21:55 +03:00
d3d84da5e3 fix: correct property name from bookableConfigs to bookableConfig in BookableSpaceEntity and SpaceEntity 2025-06-23 00:39:29 -06:00
110ed4157a task: add spaces filter to get devices by project (#422) 2025-06-23 09:34:59 +03:00
aa9e90bf08 Test/prevent server block on rate limit (#419)
* increase DB max connection to 50
2025-06-19 14:34:23 +03:00
c5dd5e28fd Test/prevent server block on rate limit (#418) 2025-06-19 13:54:22 +03:00
603e74af09 Test/prevent server block on rate limit (#417)
* task: add trust proxy header

* add logging

* task: test rate limits on sever

* task: increase rate limit timeout

* fix: merge conflicts
2025-06-19 12:54:59 +03:00
6973e8b195 task: sort communities by creation date (#416) 2025-06-19 11:13:24 +03:00
0e36f32ed6 Test/prevent server block on rate limit (#415)
* task: increase rate limit timeout
2025-06-19 10:15:29 +03:00
705ceeba29 Test/prevent server block on rate limit (#414)
* task: test rate limits on sever
2025-06-19 09:45:09 +03:00
92d102d08f Merge pull request #413 from SyncrowIOT/fix-staging-insirt-logs-data
Fix-staging-insirt-logs-data
2025-06-18 07:35:30 -06:00
7dc28d0cb3 fix: enable AQI sensor historical data update in device status processing 2025-06-18 07:32:39 -06:00
d9ad431a23 fix: correct procedure names in energy consumption updates 2025-06-18 05:33:49 -06:00
4bf43dab2b feat: enhance device status DTO and service with optional properties and environment checks 2025-06-18 05:33:43 -06:00
a37d5bb299 task: add trust proxy header (#411)
* task: add trust proxy header

* add logging
2025-06-18 12:05:53 +03:00
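"Add trust proxy header" in the commit above typically means telling the Express adapter to honor X-Forwarded-For so per-IP rate limiting sees the real client address. A hedged sketch, with the bootstrap shape assumed rather than taken from the repository:

// Illustrative bootstrap with trust proxy enabled on the Express adapter.
import { NestFactory } from '@nestjs/core';
import { NestExpressApplication } from '@nestjs/platform-express';
import { AppModule } from './app.module'; // path assumed

async function bootstrap(): Promise<void> {
  const app = await NestFactory.create<NestExpressApplication>(AppModule);
  // Trust the first proxy hop so req.ip reflects the client address from
  // X-Forwarded-For instead of the load balancer.
  app.set('trust proxy', 1);
  await app.listen(3000);
}
void bootstrap();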
49cc762962 fix duplication from conflict merge 2025-06-18 01:56:51 -06:00
a94d4610ed Merge branch 'dev' into SP-1754-be-implement-configure-space 2025-06-18 01:55:48 -06:00
274cdf741f refactor: streamline Booking module and service by removing unused imports and consolidating space validation logic 2025-06-18 01:49:00 -06:00
df59e9a4a3 refactor: update BookableSpaceEntity relationship to OneToOne with SpaceEntity 2025-06-18 01:48:46 -06:00
8c34c68ba6 refactor: remove BookableSpaceDto and its index export 2025-06-18 01:48:35 -06:00
689a38ee0c Revamp/space management (#409)
* task: add getCommunitiesV2

* task: update getOneSpace API to match revamp structure

* refactor: implement modifications to pace management APIs

* refactor: remove space link
2025-06-18 10:34:29 +03:00
332b2f5851 feat: implement Booking module with BookableSpace entity, controller, service, and DTOs for managing bookable spaces 2025-06-17 22:02:13 -06:00
8d44b66dd3 feat: add BookableSpace entity, DTO, repository, and integrate with Space entity 2025-06-17 22:02:08 -06:00
7520b8d9c7 fix: power clamp historical API (#408) 2025-06-17 15:17:49 +03:00
72753b6dfb merge dev to main 2025-06-14 15:18:20 -06:00
568eef8119 Merge branch 'dev' 2025-06-14 15:04:48 -06:00
a91d0f22a4 fix: send correct enable status to email sender function (#407) 2025-06-13 09:46:41 +03:00
0db060ae3f Merge pull request #406 from SyncrowIOT/add-space-daily-occupancy-duration-entity
feat: add SpaceDailyOccupancyDuration entity, DTO, and repository for occupancy tracking
2025-06-12 04:17:18 -06:00
f2ed04f206 feat: add SpaceDailyOccupancyDuration entity, DTO, and repository for occupancy tracking 2025-06-12 02:49:59 -06:00
ea9a65178d fix: add space filter to "join" operation instead of "and" operation (#405) 2025-06-11 16:28:33 +03:00
8503ee728d Refactor/space management (#404)
* refactor: reducing used queries on get communities (#385)

* refactor: fix create space logic (#394)

* Remove unique constraint on subspace and product in SubspaceProductAllocationEntity; update product relation to nullable in NewTagEntity

* refactor: fix create space logic

* device model updated to include the fixes and final columns

* updated space models to include suggested fixes, update final logic and column names

* task: removing old references of the old tag-product relation

* task: remove old use of tags

* task: remove old tag & tag model usage

* refactor: delete space

* task: remove unused functions

* fix lint rule
2025-06-11 13:15:21 +03:00
4f5e1b23f6 Merge pull request #403 from SyncrowIOT/SP-1629-AND-SP-1630-AQI-APIs
Sp 1629 and sp 1630 aqi ap is
2025-06-11 00:28:14 -06:00
2cb77504ca Add PollutantType enum and update AQI-related entities and services to use it 2025-06-11 00:28:00 -06:00
c86be27576 Add AQI module and related services, controllers, and DTOs
- Introduced AqiModule with AqiService and AqiController for handling AQI data.
- Added DTOs for AQI requests: GetAqiDailyBySpaceDto and GetAqiPollutantBySpaceDto.
- Implemented AqiDataService for managing AQI sensor historical data.
- Updated existing modules to include AqiDataService where necessary.
- Defined new routes for AQI data retrieval in ControllerRoute.
2025-06-10 18:19:34 -06:00
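The commit body above names GetAqiDailyBySpaceDto among the new AQI request DTOs. A rough sketch of what such a DTO could look like follows; the exact fields and validators are assumptions, only the class name comes from the commit text.

// Hypothetical shape of GetAqiDailyBySpaceDto; fields are assumed.
import { ApiProperty } from '@nestjs/swagger';
import { IsNotEmpty, IsString, Matches } from 'class-validator';

export class GetAqiDailyBySpaceDto {
  @ApiProperty({ description: 'Space UUID to fetch AQI data for' })
  @IsString()
  @IsNotEmpty()
  spaceUuid: string;

  @ApiProperty({ description: 'Month to query, in YYYY-MM format', example: '2025-06' })
  @Matches(/^\d{4}-\d{2}$/, { message: 'monthDate must be in YYYY-MM format' })
  monthDate: string;
}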
3a08f9f258 Merge branch 'dev' into aqi-test 2025-06-10 17:08:06 -06:00
5c96a3b117 Merge pull request #402 from SyncrowIOT/add-code-and-value-as-uniqe-key
Refactor DeviceStatusLogEntity: expand unique constraint to include c…
2025-06-10 01:43:09 -06:00
97e14e70f7 Refactor DeviceStatusLogEntity: expand unique constraint to include code and value 2025-06-10 01:41:41 -06:00
03d44cb14f Merge pull request #401 from SyncrowIOT/fix-log-insert-error
Refactor DeviceStatusLogEntity: update unique constraint to include d…
2025-06-10 01:27:15 -06:00
0793441e06 Refactor DeviceStatusLogEntity: correct unique constraint name for event time and device ID 2025-06-10 01:24:33 -06:00
b6321c2530 Refactor DeviceStatusLogEntity: update unique constraint to include deviceId 2025-06-10 01:18:58 -06:00
b8d34b0d9f Merge pull request #400 from SyncrowIOT/revert-398-fix-log-duplication-issue
Revert "Refactor DeviceStatusLogEntity: update unique constraint and primary …"
2025-06-10 01:04:29 -06:00
c1065126aa Revert "Refactor DeviceStatusLogEntity: update unique constraint and primary …" 2025-06-10 01:03:45 -06:00
1742454984 Merge pull request #398 from SyncrowIOT/fix-log-duplication-issue
Refactor DeviceStatusLogEntity: update unique constraint and primary …
2025-06-10 00:26:43 -06:00
7eb13088ac Refactor DeviceStatusLogEntity: update unique constraint and primary key definition 2025-06-09 04:50:58 -06:00
7b97e50d2e Merge pull request #391 from SyncrowIOT/DATA-space-model-aqi-update-logic
AQI space model updated with new hourly to daily logic for calculatio…
2025-06-04 17:40:39 -04:00
4fb26fc131 Merge pull request #397 from SyncrowIOT/DATA-daily-procedure-aqi
Procedures insert-all, update, and select for daily space air quality…
2025-06-04 17:39:34 -04:00
ee0261d102 Fix typos procedure select and update 2025-06-04 17:32:50 -04:00
0d6de2df43 Refactor AqiSpaceDailyPollutantStatsEntity: update unique constraint and rename event fields for clarity 2025-06-04 15:24:13 -06:00
80e89dd035 fix name of snapshot table 2025-06-04 17:20:25 -04:00
466863e71f Procedures insert-all, update, and select for daily space air quality stats 2025-06-04 16:33:55 -04:00
30aafdede6 Merge pull request #390 from SyncrowIOT/DATA-device-model-aqi-update-logic
device model for aqi updated with hourly to daily logic getting max, …
2025-06-04 12:04:10 +03:00
01ce4d4b29 Merge pull request #396 from SyncrowIOT/fix-some-issue-in-get-commuinty-and-weather-apis
Fix some issue in get commuinty and weather apis
2025-06-04 01:32:21 -06:00
43dfaaa90d fix: utilize WEATHER_API_URL in WeatherService for dynamic API endpoint 2025-06-04 01:32:01 -06:00
ea021ad228 fix: update error message for invalid latitude and longitude in fetchWeatherDetails method 2025-06-04 01:09:49 -06:00
cd3e9016f2 fix: improve error handling in fetchWeatherDetails method 2025-06-04 01:08:33 -06:00
ef2245eae1 Add AQI space daily pollutant stats module and related entities, DTOs, and repositories 2025-06-03 23:37:52 -06:00
3ad81864d1 updated space models to include suggested fixes, update final logic and column names 2025-06-03 21:05:34 -04:00
ab3efedc35 device model updated to include the fixes and final columns 2025-06-03 20:52:27 -04:00
4a984ae5dd Merge pull request #395 from SyncrowIOT/SP-1678-be-implement-weather-apis-by-location
Add Weather module with controller, service, and DTO for fetching weather details
2025-06-03 03:05:01 -06:00
c39129f75b Add Weather module with controller, service, and DTO for fetching weather details 2025-06-03 02:38:44 -06:00
35ce13a67f fix: return proper error on login API (#386) 2025-06-03 09:47:24 +03:00
12a9272b8b Merge pull request #393 from SyncrowIOT/SP-1675-be-return-space-uuid-in-get-devices-api
SP-1675-be-return-space-uuid-in-get-devices-api
2025-06-02 01:29:04 -06:00
0fe6c80731 Add utility function to associate space UUID with devices in community and device services 2025-06-01 21:56:08 -06:00
81e017430e Merge pull request #392 from SyncrowIOT/temp-product-relation-fixes
Remove unique constraint on subspace and product in SubspaceProductAllocationEntity; update product relation to nullable in NewTagEntity
2025-06-02 06:20:03 +03:00
191d0dfaf6 Remove unique constraint on subspace and product in SubspaceProductAllocationEntity; update product relation to nullable in NewTagEntity 2025-06-01 21:16:51 -06:00
5b0135ba80 AQI space model updated with new hourly to daily logic for calculations and categorization of aqi brackets 2025-06-01 16:09:17 -04:00
2fee8c055e device model for aqi updated with hourly to daily logic getting max, min, avergae and percentage of categorical values for each aqi bracket 2025-06-01 16:03:07 -04:00
59161d4049 Merge pull request #389 from SyncrowIOT/DATA-daily-space-aqi-model
Air quality and AQI for space model
2025-06-01 14:31:13 -04:00
b989338790 Merge pull request #388 from SyncrowIOT/DATA-device-aqi-fix-update
fixed pm25 code + date format. Added average AQI level from device
2025-06-01 14:31:00 -04:00
f5ed9d4fce Merge pull request #387 from SyncrowIOT/DATA-occupancy-duration-fix
DATA-occupancy-duration-fix
2025-05-30 11:59:31 +03:00
3ac48183bd procedure fix test 2025-05-30 11:27:12 +03:00
684205053d testing fix 2025-05-30 10:51:26 +03:00
bfd92fdd87 fix 2025-05-29 15:05:06 +03:00
dd54af5f46 fix 2025-05-29 13:38:50 +03:00
90fc44ab53 Air quality and AQI for space model 2025-05-28 21:14:01 -04:00
efdf918159 fixed pm25 code + date format. Added average AQI level from device 2025-05-28 20:42:56 -04:00
25967d02f9 model adjustments 2025-05-28 13:55:56 +03:00
f44dc793a6 Merge pull request #383 from SyncrowIOT/daily-aqi-score
AQI score calculations and air quality model
2025-05-22 14:57:12 +03:00
a7c4bf1c3d AQI score calculations and air quality model 2025-05-22 07:28:50 -04:00
a8bb161ee2 Merge pull request #382 from SyncrowIOT/DATA-monthly-yearly-procedure-occupancy
DATA-occupancy_count_per_month
2025-05-22 14:18:18 +03:00
fe891030aa input year, output month 2025-05-22 13:10:05 +03:00
a40560a0b1 Merge pull request #380 from SyncrowIOT/revert-378-daily-aqi-sensor
Revert "SQL model for aqi score and processing air data"
2025-05-21 20:04:43 -04:00
7d6f1bb944 Revert "SQL model for aqi score and processing air data" 2025-05-21 20:01:05 -04:00
434316fe51 Merge pull request #378 from SyncrowIOT/daily-aqi-sensor
SQL model for aqi score and processing air data
2025-05-21 16:54:19 -04:00
287bb4c5e4 SQL model for aqi score and processing air data 2025-05-21 16:49:44 -04:00
f2e515b180 Merge pull request #377 from SyncrowIOT/SP-1561-be-get-all-projects-api-is-returning-internal-server-error
Sp 1561 be get all projects api is returning internal server error
2025-05-21 16:32:18 +03:00
f7fd96afa1 fix: remove unused params from get all projects api 2025-05-20 14:40:48 +03:00
5292271721 Merge pull request #376 from SyncrowIOT/SP-1555-implement-occupancy-api-for-analytics-dashboard
feat: add occupancy duration data retrieval and update procedures
2025-05-18 17:42:00 +03:00
180d16eeb1 feat: add occupancy duration data retrieval and update procedures 2025-05-18 17:40:41 +03:00
dca3db0c59 Merge pull request #373 from SyncrowIOT/DATA-space-occupancy-duration-procedure
DATA-space-occupancy-duration
2025-05-15 12:36:09 +03:00
56e78683b3 adjusted select 2025-05-15 12:28:38 +03:00
e575e51c4c Merge pull request #375 from SyncrowIOT/fix-duplication-community
feat: optimize getAllDevicesByCommunity to prevent duplicate space processing
2025-05-15 10:09:59 +03:00
8750da7e62 feat: optimize getAllDevicesByCommunity to prevent duplicate space processing 2025-05-15 10:09:29 +03:00
e3e9fe82fc Merge pull request #374 from SyncrowIOT/fix-duplication-devices-by-community
refactor: optimize device retrieval by avoiding duplicate space visits in getAllDevicesByCommunity
2025-05-15 10:08:31 +03:00
e253d1ca03 refactor: optimize device retrieval by avoiding duplicate space visits in getAllDevicesByCommunity 2025-05-15 10:08:02 +03:00
b50d7682f3 updates 2025-05-14 15:50:28 +03:00
1bb3803229 wording 2025-05-14 15:03:52 +03:00
92ee6ee951 bug fix 2025-05-14 14:11:36 +03:00
c06be4736c occupancy duration procedures 2025-05-14 13:34:29 +03:00
5cb4295f8a Merge pull request #372 from SyncrowIOT/fix-get-devices-by-product-type
feat: update DEVICE_SPACE_COMMUNITY route and add validation for spaceUuid and communityUuid in DTO
2025-05-14 13:00:46 +03:00
67331aa92a feat: update DEVICE_SPACE_COMMUNITY route and add validation for spaceUuid and communityUuid in DTO 2025-05-14 13:00:09 +03:00
7ec41f8311 Merge pull request #371 from SyncrowIOT/SP-1554-be-implement-get-all-devices-in-spaces-and-include-the-childs-for-analytics-dashboard
SP-1554-be-implement-get-all-devices-in-spaces-and-include-the-childs-for-analytics-dashboard
2025-05-13 03:20:08 +03:00
4aa3d04478 feat: update device and space services to use productType instead of deviceType and add query support for device retrieval by product type 2025-05-13 03:19:21 +03:00
799fcb6fb9 feat: add DEVICE_SPACE_COMMUNITY route and controller for device retrieval by space or community 2025-05-13 03:06:43 +03:00
921770ea79 Merge pull request #370 from SyncrowIOT/SP-1556-be-implement-occupancy-heat-map-api-for-analytics-dashboard
feat: add occupancy module with controller, service, and related DTOs for heat map data retrieval
2025-05-12 02:16:54 +03:00
7ec4171e1a feat: add occupancy module with controller, service, and related DTOs for heat map data retrieval 2025-05-12 02:15:57 +03:00
c0a6e9ab63 Merge pull request #368 from SyncrowIOT/DATA-space-occupancy-procedures
DATA-daily-space-occupancy-procedures
2025-05-12 01:02:38 +03:00
208281386d Merge pull request #369 from SyncrowIOT/SP-1552-be-implement-energy-consumption-include-all-device-api-for-analytics-dashboard
feat: add groupByDevice option to GetPowerClampBySpaceDto and update service logic
2025-05-11 02:00:51 +03:00
fb5084ba3a feat: add groupByDevice option to GetPowerClampBySpaceDto and update service logic 2025-05-11 01:59:26 +03:00
9c3abdd08a added on conflict 2025-05-09 16:25:30 +03:00
3535c1d8c5 month param 2025-05-09 15:47:05 +03:00
2ba5700fdd space occupancy API 2025-05-09 15:24:39 +03:00
1479c74423 Merge pull request #367 from SyncrowIOT/add-space-presence-sensor-detection-entity
refactor: rename presence sensor entities and update related references
2025-05-09 13:11:56 +03:00
8030644fee refactor: rename presence sensor entities and update related references 2025-05-09 13:11:14 +03:00
d43e860867 Merge pull request #366 from SyncrowIOT/DATA-param-removal
removed param from first CTE
2025-05-08 13:32:32 +03:00
f8269df3fb removed param from first CTE 2025-05-08 13:31:43 +03:00
85602fa952 check deployment 2025-05-08 13:15:31 +03:00
c085514d27 Merge pull request #365 from SyncrowIOT/DATA-date-param-move
DATA-param-moved
2025-05-08 13:12:21 +03:00
fa3cb578df param moved 2025-05-08 13:11:29 +03:00
25a4d3e91b Merge pull request #364 from SyncrowIOT/revert-363-DATA-date-param-filtering
Revert "DATA-date-param-moved"
2025-05-08 13:08:58 +03:00
d3a560d18f Revert "DATA-date-param-moved" 2025-05-08 13:08:41 +03:00
ab99bb5afc Merge pull request #363 from SyncrowIOT/DATA-date-param-filtering
DATA-date-param-moved
2025-05-08 13:07:51 +03:00
67911d5ff1 moved param 2025-05-08 13:06:39 +03:00
b4572beec2 Merge pull request #358 from SyncrowIOT/DATA-daily-occupancy-procedure
DATA-daily occupancy procedure
2025-05-08 12:55:42 +03:00
b3e86ec56f Merge pull request #362 from SyncrowIOT/fix-power-clamp-historical-data
Add endpoints and logic for fetching power clamp data by community or…
2025-05-07 23:09:58 +03:00
45b8cdcaae Add endpoints and logic for fetching power clamp data by community or space
- Introduced new API endpoints to retrieve power clamp historical data based on community or space UUID.
- Updated PowerClampController to handle requests with optional parameters for community and space.
- Enhanced PowerClampService to validate input and fetch devices accordingly.
- Created ResourceParamsDto to manage request parameters.
- Updated ControllerRoute with new action summaries and descriptions.
2025-05-07 23:09:01 +03:00
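The commit body above mentions a ResourceParamsDto plus input validation for the community/space power clamp endpoints. A hypothetical sketch of that DTO and check follows; field names beyond those in the commit text, and the helper function, are assumptions.

// Hypothetical ResourceParamsDto: optional community or space UUID, with a
// service-side check that at least one is provided.
import { BadRequestException } from '@nestjs/common';
import { ApiPropertyOptional } from '@nestjs/swagger';
import { IsOptional, IsUUID } from 'class-validator';

export class ResourceParamsDto {
  @ApiPropertyOptional({ description: 'Community UUID to aggregate power clamp data for' })
  @IsOptional()
  @IsUUID()
  communityUuid?: string;

  @ApiPropertyOptional({ description: 'Space UUID to aggregate power clamp data for' })
  @IsOptional()
  @IsUUID()
  spaceUuid?: string;
}

export function assertResourceParams(params: ResourceParamsDto): void {
  if (!params.communityUuid && !params.spaceUuid) {
    throw new BadRequestException('Either communityUuid or spaceUuid must be provided');
  }
}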
5ed59e4fcc Merge pull request #361 from SyncrowIOT/Implement-Total-Enargy-by-space-api
Implement total enargy by space api
2025-05-07 12:19:06 +03:00
91abfb41ab Implement month-based date formatting and filtering in PowerClamp service 2025-05-07 12:17:17 +03:00
d40fb7a762 Merge branch 'dev' into SP-1551-be-implement-total-energy-consumption-api-for-analytics-dashboard 2025-05-07 10:59:42 +03:00
71f795babe Merge pull request #360 from SyncrowIOT/DATA-energy_consumption_procedure_edits
DATA-daily_energy_consummed
2025-05-06 11:33:53 +03:00
0d48505eac month name 2025-05-06 11:30:23 +03:00
e538f2b829 grain change 2025-05-06 11:28:43 +03:00
23af8e9de3 Merge pull request #359 from SyncrowIOT/create-presence-sensor-daily-detection-entity
feat: add presence sensor module with entity, DTO, and repository
2025-05-04 22:36:52 +03:00
d197bf2bb4 feat: implement date formatting function and enhance PowerClampService with space-based data retrieval 2025-05-04 22:28:38 +03:00
2a1f1f52f6 feat: add presence sensor module with entity, DTO, and repository 2025-05-04 19:11:14 +03:00
13e3f3e213 Merge branch 'dev' 2025-04-29 09:58:05 +03:00
327d678656 Enhance TuyaWebSocketService to handle environment-specific message extraction 2025-03-28 03:40:09 +03:00
dd5447fc5f Merge pull request #311 from SyncrowIOT/dev 2025-03-13 13:56:50 +04:00
7df5b9ab08 Merge branch 'main' of https://github.com/SyncrowIOT/backend 2025-03-13 11:06:06 +03:00
06b4407b85 Merge branch 'dev' 2025-03-13 11:05:11 +03:00
1d6f3b8e65 Merge pull request #309 from SyncrowIOT:dev
propagating of space model to space
2025-03-13 00:27:23 +04:00
80659f7a48 Merge branch 'dev' 2025-03-12 02:22:33 +03:00
4a5f2f3b9f Merge branch 'dev' 2025-03-11 20:27:22 +03:00
a57f4e1c65 Merge branch 'dev' 2025-03-11 15:33:52 +03:00
b2d52c7622 Merge branch 'dev' 2025-02-20 03:46:08 -06:00
c9cbb2e085 Merge pull request #262 from SyncrowIOT/dev
change subspace tag movement
2025-02-19 13:11:46 +04:00
8aa3de5fdc config 2025-02-18 16:59:38 +04:00
bc1ee9a53b test deploy 2 2025-02-18 05:39:55 -06:00
19356c3833 test deploy 2025-02-18 05:35:06 -06:00
8737ee992b Update GitHub Actions workflow for Node.js app deployment to Azure 2025-02-18 05:08:51 -06:00
e98a99be73 Update GitHub Actions workflow for containerized deployment to Azure Web App 2025-02-18 05:03:05 -06:00
93efa15f3c Empty commit 2025-02-18 04:50:54 -06:00
c305e39ff2 Add or update the Azure App Service build and deployment workflow config 2025-02-18 04:34:31 -06:00
61e4d220dc test deploy 2025-02-18 04:15:24 -06:00
cd4bbe1788 Empty commit 2025-02-18 00:10:22 -06:00
d770a0c732 Remove robots.txt request handling middleware 2025-02-17 18:51:16 -06:00
030e6ae902 Add middleware to ignore requests for robots*.txt files 2025-02-17 18:43:43 -06:00
9d8287b82b Remove trailing whitespace in GitHub workflow file 2025-02-17 18:05:20 -06:00
d741a6c1f3 Empty commit 2025-02-17 17:50:51 -06:00
6d55704dd4 Merge branch 'dev' 2025-02-17 17:35:45 -06:00
d8ad9e55ea Merge pull request #253 from SyncrowIOT/dev
Dev
2025-02-06 09:26:54 +04:00
224 changed files with 11470 additions and 8999 deletions

View File

@ -21,6 +21,7 @@ module.exports = {
'@typescript-eslint/explicit-function-return-type': 'off',
'@typescript-eslint/explicit-module-boundary-types': 'off',
'@typescript-eslint/no-explicit-any': 'off',
"@typescript-eslint/no-unused-vars": 'warn',
},
settings: {
'import/resolver': {

17 .github/pull_request_template.md vendored Normal file
View File

@ -0,0 +1,17 @@
<!--
Thanks for contributing!
Provide a description of your changes below and a general summary in the title.
-->
## Jira Ticket
[SP-0000](https://syncrow.atlassian.net/browse/SP-0000)
## Description
<!--- Describe your changes in detail -->
## How to Test
<!--- Describe the created APIs / Logic -->

View File

@ -1,4 +1,7 @@
name: Backend deployment to Azure App Service
# Docs for the Azure Web Apps Deploy action: https://github.com/Azure/webapps-deploy
# More GitHub Actions for Azure: https://github.com/Azure/actions
name: Build and deploy container app to Azure Web App - syncrow(staging)
on:
push:
@ -6,50 +9,43 @@ on:
- main
workflow_dispatch:
env:
AZURE_WEB_APP_NAME: 'syncrow'
AZURE_WEB_APP_SLOT_NAME: 'staging'
ACR_REGISTRY: 'syncrow.azurecr.io'
IMAGE_NAME: 'backend'
IMAGE_TAG: 'latest'
jobs:
build_and_deploy:
runs-on: ubuntu-latest
build:
runs-on: 'ubuntu-latest'
steps:
- uses: actions/checkout@v4
- uses: actions/checkout@v2
- name: Set up Node.js
uses: actions/setup-node@v3
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v2
- name: Log in to registry
uses: docker/login-action@v2
with:
node-version: '20'
registry: https://syncrow.azurecr.io/
username: ${{ secrets.AzureAppService_ContainerUsername_47395803300340b49931ea82f6d80be3 }}
password: ${{ secrets.AzureAppService_ContainerPassword_e7b0ff54f54d44cba04a970a22384848 }}
- name: Install dependencies and build project
run: |
npm install
npm run build
- name: Log in to Azure
uses: azure/login@v1
- name: Build and push container image to registry
uses: docker/build-push-action@v3
with:
creds: ${{ secrets.AZURE_CREDENTIALS }}
push: true
tags: syncrow.azurecr.io/${{ secrets.AzureAppService_ContainerUsername_47395803300340b49931ea82f6d80be3 }}/syncrow/backend:${{ github.sha }}
file: ./Dockerfile
- name: Log in to Azure Container Registry
run: az acr login --name ${{ env.ACR_REGISTRY }}
deploy:
runs-on: ubuntu-latest
needs: build
environment:
name: 'staging'
url: ${{ steps.deploy-to-webapp.outputs.webapp-url }}
- name: List build output
run: ls -R dist/
- name: Build and push Docker image
run: |
docker build . -t ${{ env.ACR_REGISTRY }}/${{ env.IMAGE_NAME }}:${{ env.IMAGE_TAG }}
docker push ${{ env.ACR_REGISTRY }}/${{ env.IMAGE_NAME }}:${{ env.IMAGE_TAG }}
- name: Set Web App with Docker container
run: |
az webapp config container set \
--name ${{ env.AZURE_WEB_APP_NAME }} \
--resource-group backend \
--docker-custom-image-name ${{ env.ACR_REGISTRY }}/${{ env.IMAGE_NAME }}:${{ env.IMAGE_TAG }} \
--docker-registry-server-url https://${{ env.ACR_REGISTRY }}
steps:
- name: Deploy to Azure Web App
id: deploy-to-webapp
uses: azure/webapps-deploy@v2
with:
app-name: 'syncrow'
slot-name: 'staging'
publish-profile: ${{ secrets.AzureAppService_PublishProfile_44f7766441ec4796b74789e9761ef589 }}
images: 'syncrow.azurecr.io/${{ secrets.AzureAppService_ContainerUsername_47395803300340b49931ea82f6d80be3 }}/syncrow/backend:${{ github.sha }}'

73 .github/workflows/main_syncrow(stg).yml vendored Normal file
View File

@ -0,0 +1,73 @@
# Docs for the Azure Web Apps Deploy action: https://github.com/Azure/webapps-deploy
# More GitHub Actions for Azure: https://github.com/Azure/actions
name: Build and deploy Node.js app to Azure Web App - syncrow
on:
push:
branches:
- main
workflow_dispatch:
jobs:
build:
runs-on: ubuntu-latest
permissions:
contents: read #This is required for actions/checkout
steps:
- uses: actions/checkout@v4
- name: Set up Node.js version
uses: actions/setup-node@v3
with:
node-version: '20.x'
- name: npm install, build, and test
run: |
npm install
npm run build --if-present
npm run test --if-present
- name: Zip artifact for deployment
run: zip release.zip ./* -r
- name: Upload artifact for deployment job
uses: actions/upload-artifact@v4
with:
name: node-app
path: release.zip
deploy:
runs-on: ubuntu-latest
needs: build
environment:
name: 'stg'
url: ${{ steps.deploy-to-webapp.outputs.webapp-url }}
permissions:
id-token: write #This is required for requesting the JWT
contents: read #This is required for actions/checkout
steps:
- name: Download artifact from build job
uses: actions/download-artifact@v4
with:
name: node-app
- name: Unzip artifact for deployment
run: unzip release.zip
- name: Login to Azure
uses: azure/login@v2
with:
client-id: ${{ secrets.AZUREAPPSERVICE_CLIENTID_515C8E782CFF431AB20448C85CA0FE58 }}
tenant-id: ${{ secrets.AZUREAPPSERVICE_TENANTID_2AEFE5534424490387C08FAE41573CC2 }}
subscription-id: ${{ secrets.AZUREAPPSERVICE_SUBSCRIPTIONID_00623C33023749FEA5F6BC36884F9C8A }}
- name: 'Deploy to Azure Web App'
id: deploy-to-webapp
uses: azure/webapps-deploy@v3
with:
app-name: 'syncrow'
slot-name: 'stg'
package: .

2 .gitignore vendored
View File

@ -4,7 +4,7 @@
/build
#github
/.github
/.github/workflows
# Logs
logs

View File

@ -1,18 +1,19 @@
import { PlatformType } from '@app/common/constants/platform-type.enum';
import { RoleType } from '@app/common/constants/role.type.enum';
import { UserEntity } from '@app/common/modules/user/entities';
import {
BadRequestException,
Injectable,
UnauthorizedException,
} from '@nestjs/common';
import { ConfigService } from '@nestjs/config';
import { JwtService } from '@nestjs/jwt';
import * as argon2 from 'argon2';
import { HelperHashService } from '../../helper/services';
import { UserRepository } from '../../../../common/src/modules/user/repositories';
import { UserSessionRepository } from '../../../../common/src/modules/session/repositories/session.repository';
import { UserSessionEntity } from '../../../../common/src/modules/session/entities';
import { ConfigService } from '@nestjs/config';
import { OAuth2Client } from 'google-auth-library';
import { PlatformType } from '@app/common/constants/platform-type.enum';
import { RoleType } from '@app/common/constants/role.type.enum';
import { UserSessionEntity } from '../../../../common/src/modules/session/entities';
import { UserSessionRepository } from '../../../../common/src/modules/session/repositories/session.repository';
import { UserRepository } from '../../../../common/src/modules/user/repositories';
import { HelperHashService } from '../../helper/services';
@Injectable()
export class AuthService {
@ -32,7 +33,7 @@ export class AuthService {
pass: string,
regionUuid?: string,
platform?: PlatformType,
): Promise<any> {
): Promise<Omit<UserEntity, 'password'>> {
const user = await this.userRepository.findOne({
where: {
email,
@ -40,16 +41,17 @@ export class AuthService {
},
relations: ['roleType', 'project'],
});
if (
platform === PlatformType.WEB &&
(user.roleType.type === RoleType.SPACE_OWNER ||
user.roleType.type === RoleType.SPACE_MEMBER)
) {
throw new UnauthorizedException('Access denied for web platform');
}
if (!user) {
throw new BadRequestException('Invalid credentials');
}
if (
platform === PlatformType.WEB &&
[RoleType.SPACE_OWNER, RoleType.SPACE_MEMBER].includes(
user.roleType.type as RoleType,
)
) {
throw new UnauthorizedException('Access denied for web platform');
}
if (!user.isUserVerified) {
throw new BadRequestException('User is not verified');
@ -69,8 +71,9 @@ export class AuthService {
}
// eslint-disable-next-line @typescript-eslint/no-unused-vars
const { password, ...result } = user;
return result;
// const { password, ...result } = user;
delete user.password;
return user;
}
async createSession(data): Promise<UserSessionEntity> {
@ -113,6 +116,7 @@ export class AuthService {
hasAcceptedWebAgreement: user.hasAcceptedWebAgreement,
hasAcceptedAppAgreement: user.hasAcceptedAppAgreement,
project: user?.project,
bookingPoints: user?.bookingPoints,
};
if (payload.googleCode) {
const profile = await this.getProfile(payload.googleCode);

View File

@ -69,7 +69,28 @@ export class ControllerRoute {
'Retrieve the list of all regions registered in Syncrow.';
};
};
static BOOKABLE_SPACES = class {
public static readonly ROUTE = 'bookable-spaces';
static ACTIONS = class {
public static readonly ADD_BOOKABLE_SPACES_SUMMARY =
'Add new bookable spaces';
public static readonly ADD_BOOKABLE_SPACES_DESCRIPTION =
'This endpoint allows you to add new bookable spaces by providing the required details.';
public static readonly GET_ALL_BOOKABLE_SPACES_SUMMARY =
'Get all bookable spaces';
public static readonly GET_ALL_BOOKABLE_SPACES_DESCRIPTION =
'This endpoint retrieves all bookable spaces.';
public static readonly UPDATE_BOOKABLE_SPACES_SUMMARY =
'Update existing bookable spaces';
public static readonly UPDATE_BOOKABLE_SPACES_DESCRIPTION =
'This endpoint allows you to update existing bookable spaces by providing the required details.';
};
};
static COMMUNITY = class {
public static readonly ROUTE = '/projects/:projectUuid/communities';
static ACTIONS = class {
@ -199,6 +220,11 @@ export class ControllerRoute {
public static readonly UPDATE_SPACE_DESCRIPTION =
'Updates a space by its UUID and community ID. You can update the name, parent space, and other properties. If a parent space is provided and not already a parent, its `isParent` flag will be set to true.';
public static readonly UPDATE_CHILDREN_SPACES_ORDER_OF_A_SPACE_SUMMARY =
'Update the order of child spaces under a specific parent space';
public static readonly UPDATE_CHILDREN_SPACES_ORDER_OF_A_SPACE_DESCRIPTION =
'Updates the order of child spaces under a specific parent space. You can provide a new order for the child spaces.';
public static readonly GET_HEIRARCHY_SUMMARY = 'Get space hierarchy';
public static readonly GET_HEIRARCHY_DESCRIPTION =
'This endpoint retrieves the hierarchical structure of spaces under a given space ID. It returns all the child spaces nested within the specified space, organized by their parent-child relationships. ';
@ -397,6 +423,11 @@ export class ControllerRoute {
public static readonly DELETE_USER_SUMMARY = 'Delete user by UUID';
public static readonly DELETE_USER_DESCRIPTION =
'This endpoint deletes a user identified by their UUID. Accessible only by users with the Super Admin role.';
public static readonly DELETE_USER_PROFILE_SUMMARY =
'Delete user profile by UUID';
public static readonly DELETE_USER_PROFILE_DESCRIPTION =
'This endpoint deletes a user profile identified by their UUID. Accessible only by users with the Super Admin role.';
public static readonly UPDATE_USER_WEB_AGREEMENT_SUMMARY =
'Update user web agreement by user UUID';
public static readonly UPDATE_USER_WEB_AGREEMENT_DESCRIPTION =
@ -465,7 +496,16 @@ export class ControllerRoute {
'This endpoint retrieves the terms and conditions for the application.';
};
};
static WEATHER = class {
public static readonly ROUTE = 'weather';
static ACTIONS = class {
public static readonly FETCH_WEATHER_DETAILS_SUMMARY =
'Fetch Weather Details';
public static readonly FETCH_WEATHER_DETAILS_DESCRIPTION =
'This endpoint retrieves the current weather details for a specified location like temperature, humidity, etc.';
};
};
static PRIVACY_POLICY = class {
public static readonly ROUTE = 'policy';
@ -492,12 +532,40 @@ export class ControllerRoute {
};
static PowerClamp = class {
public static readonly ROUTE = 'power-clamp';
static ACTIONS = class {
public static readonly GET_ENERGY_SUMMARY =
'Get power clamp historical data';
public static readonly GET_ENERGY_DESCRIPTION =
'This endpoint retrieves the historical data of a power clamp device based on the provided parameters.';
public static readonly GET_ENERGY_BY_COMMUNITY_OR_SPACE_SUMMARY =
'Get power clamp historical data by community or space';
public static readonly GET_ENERGY_BY_COMMUNITY_OR_SPACE_DESCRIPTION =
'This endpoint retrieves the historical data of power clamp devices based on the provided community or space UUID.';
};
};
static Occupancy = class {
public static readonly ROUTE = 'occupancy';
static ACTIONS = class {
public static readonly GET_OCCUPANCY_HEAT_MAP_SUMMARY =
'Get occupancy heat map data';
public static readonly GET_OCCUPANCY_HEAT_MAP_DESCRIPTION =
'This endpoint retrieves the occupancy heat map data based on the provided parameters.';
};
};
static AQI = class {
public static readonly ROUTE = 'aqi';
static ACTIONS = class {
public static readonly GET_AQI_RANGE_DATA_SUMMARY = 'Get AQI range data';
public static readonly GET_AQI_RANGE_DATA_DESCRIPTION =
'This endpoint retrieves the AQI (Air Quality Index) range data based on the provided parameters.';
public static readonly GET_AQI_DISTRIBUTION_DATA_SUMMARY =
'Get AQI distribution data';
public static readonly GET_AQI_DISTRIBUTION_DATA_DESCRIPTION =
'This endpoint retrieves the AQI (Air Quality Index) distribution data based on the provided parameters.';
};
};
static DEVICE = class {
@ -590,6 +658,11 @@ export class ControllerRoute {
'Delete scenes by device uuid and switch name';
public static readonly DELETE_SCENES_BY_SWITCH_NAME_DESCRIPTION =
'This endpoint deletes all scenes associated with a specific switch device.';
public static readonly POPULATE_TUYA_CONST_UUID_SUMMARY =
'Populate Tuya const UUID';
public static readonly POPULATE_TUYA_CONST_UUID_DESCRIPTION =
'This endpoint populates the Tuya const UUID for all devices.';
};
};
static DEVICE_COMMISSION = class {
@ -609,6 +682,17 @@ export class ControllerRoute {
'This endpoint retrieves all devices in the system.';
};
};
static DEVICE_SPACE_COMMUNITY = class {
public static readonly ROUTE = 'devices-space-community';
static ACTIONS = class {
public static readonly GET_ALL_DEVICES_BY_SPACE_OR_COMMUNITY_WITH_RECURSIVE_CHILD_SUMMARY =
'Get all devices by space or community with recursive child';
public static readonly GET_ALL_DEVICES_BY_SPACE_OR_COMMUNITY_WITH_RECURSIVE_CHILD_DESCRIPTION =
'This endpoint retrieves all devices in the system by space or community with recursive child.';
};
};
static DEVICE_PERMISSION = class {
public static readonly ROUTE = 'device-permission';

View File

@ -0,0 +1,8 @@
export enum PollutantType {
AQI = 'aqi',
PM25 = 'pm25',
PM10 = 'pm10',
VOC = 'voc',
CO2 = 'co2',
CH2O = 'ch2o',
}

View File

@ -0,0 +1,4 @@
export enum PresenceSensorEnum {
PRESENCE_STATE = 'presence_state',
SENSITIVITY = 'sensitivity',
}

View File

@ -15,8 +15,10 @@ export enum ProductType {
WL = 'WL',
GD = 'GD',
CUR = 'CUR',
CUR_2 = 'CUR_2',
PC = 'PC',
FOUR_S = '4S',
SIX_S = '6S',
SOS = 'SOS',
AQI = 'AQI',
}

View File

@ -1,56 +1,64 @@
import { Module } from '@nestjs/common';
import { ConfigModule, ConfigService } from '@nestjs/config';
import { TypeOrmModule } from '@nestjs/typeorm';
import { SnakeNamingStrategy } from './strategies';
import { UserEntity } from '../modules/user/entities/user.entity';
import { UserSessionEntity } from '../modules/session/entities/session.entity';
import { UserOtpEntity } from '../modules/user/entities';
import { ProductEntity } from '../modules/product/entities';
import { DeviceEntity } from '../modules/device/entities';
import { PermissionTypeEntity } from '../modules/permission/entities';
import { ProductEntity } from '../modules/product/entities';
import { UserSessionEntity } from '../modules/session/entities/session.entity';
import { UserOtpEntity } from '../modules/user/entities';
import { UserEntity } from '../modules/user/entities/user.entity';
import { SnakeNamingStrategy } from './strategies';
import { UserSpaceEntity } from '../modules/user/entities';
import { DeviceUserPermissionEntity } from '../modules/device/entities';
import { RoleTypeEntity } from '../modules/role-type/entities';
import { UserNotificationEntity } from '../modules/user/entities';
import { DeviceNotificationEntity } from '../modules/device/entities';
import { RegionEntity } from '../modules/region/entities';
import { TimeZoneEntity } from '../modules/timezone/entities';
import { VisitorPasswordEntity } from '../modules/visitor-password/entities';
import { TypeOrmWinstonLogger } from '@app/common/logger/services/typeorm.logger';
import { createLogger } from 'winston';
import { winstonLoggerOptions } from '../logger/services/winston.logger';
import { AqiSpaceDailyPollutantStatsEntity } from '../modules/aqi/entities';
import { AutomationEntity } from '../modules/automation/entities';
import { ClientEntity } from '../modules/client/entities';
import { CommunityEntity } from '../modules/community/entities';
import { DeviceStatusLogEntity } from '../modules/device-status-log/entities';
import { SceneEntity, SceneIconEntity } from '../modules/scene/entities';
import { SceneDeviceEntity } from '../modules/scene-device/entities';
import { ProjectEntity } from '../modules/project/entities';
import {
SpaceModelEntity,
SubspaceModelEntity,
TagModel,
SpaceModelProductAllocationEntity,
SubspaceModelProductAllocationEntity,
} from '../modules/space-model/entities';
DeviceNotificationEntity,
DeviceUserPermissionEntity,
} from '../modules/device/entities';
import {
InviteUserEntity,
InviteUserSpaceEntity,
} from '../modules/Invite-user/entities';
import { InviteSpaceEntity } from '../modules/space/entities/invite-space.entity';
import { AutomationEntity } from '../modules/automation/entities';
import { SpaceProductAllocationEntity } from '../modules/space/entities/space-product-allocation.entity';
import { NewTagEntity } from '../modules/tag/entities/tag.entity';
import { SpaceEntity } from '../modules/space/entities/space.entity';
import { SpaceLinkEntity } from '../modules/space/entities/space-link.entity';
import { SubspaceProductAllocationEntity } from '../modules/space/entities/subspace/subspace-product-allocation.entity';
import { SubspaceEntity } from '../modules/space/entities/subspace/subspace.entity';
import { TagEntity } from '../modules/space/entities/tag.entity';
import { ClientEntity } from '../modules/client/entities';
import { TypeOrmWinstonLogger } from '@app/common/logger/services/typeorm.logger';
import { createLogger } from 'winston';
import { winstonLoggerOptions } from '../logger/services/winston.logger';
import { SpaceDailyOccupancyDurationEntity } from '../modules/occupancy/entities';
import {
PowerClampDailyEntity,
PowerClampHourlyEntity,
PowerClampMonthlyEntity,
} from '../modules/power-clamp/entities/power-clamp.entity';
import {
PresenceSensorDailyDeviceEntity,
PresenceSensorDailySpaceEntity,
} from '../modules/presence-sensor/entities';
import { ProjectEntity } from '../modules/project/entities';
import { RegionEntity } from '../modules/region/entities';
import { RoleTypeEntity } from '../modules/role-type/entities';
import { SceneDeviceEntity } from '../modules/scene-device/entities';
import { SceneEntity, SceneIconEntity } from '../modules/scene/entities';
import {
SpaceModelEntity,
SpaceModelProductAllocationEntity,
SubspaceModelEntity,
SubspaceModelProductAllocationEntity,
} from '../modules/space-model/entities';
import { InviteSpaceEntity } from '../modules/space/entities/invite-space.entity';
import { SpaceProductAllocationEntity } from '../modules/space/entities/space-product-allocation.entity';
import { SpaceEntity } from '../modules/space/entities/space.entity';
import { SubspaceProductAllocationEntity } from '../modules/space/entities/subspace/subspace-product-allocation.entity';
import { SubspaceEntity } from '../modules/space/entities/subspace/subspace.entity';
import { NewTagEntity } from '../modules/tag/entities/tag.entity';
import { TimeZoneEntity } from '../modules/timezone/entities';
import {
UserNotificationEntity,
UserSpaceEntity,
} from '../modules/user/entities';
import { VisitorPasswordEntity } from '../modules/visitor-password/entities';
import { BookableSpaceEntity } from '../modules/booking/entities';
@Module({
imports: [
TypeOrmModule.forRootAsync({
@ -79,9 +87,7 @@ import {
PermissionTypeEntity,
CommunityEntity,
SpaceEntity,
SpaceLinkEntity,
SubspaceEntity,
TagEntity,
UserSpaceEntity,
DeviceUserPermissionEntity,
RoleTypeEntity,
@ -96,7 +102,6 @@ import {
SceneDeviceEntity,
SpaceModelEntity,
SubspaceModelEntity,
TagModel,
InviteUserEntity,
InviteUserSpaceEntity,
InviteSpaceEntity,
@ -109,6 +114,11 @@ import {
PowerClampHourlyEntity,
PowerClampDailyEntity,
PowerClampMonthlyEntity,
PresenceSensorDailyDeviceEntity,
PresenceSensorDailySpaceEntity,
AqiSpaceDailyPollutantStatsEntity,
SpaceDailyOccupancyDurationEntity,
BookableSpaceEntity,
],
namingStrategy: new SnakeNamingStrategy(),
synchronize: Boolean(JSON.parse(configService.get('DB_SYNC'))),
@ -117,8 +127,8 @@ import {
logger: typeOrmLogger,
extra: {
charset: 'utf8mb4',
max: 20, // set pool max size
idleTimeoutMillis: 5000, // close idle clients after 5 second
max: 100, // set pool max size
idleTimeoutMillis: 3000, // close idle clients after 5 second
connectionTimeoutMillis: 12_000, // return an error after 11 second if connection could not be established
maxUses: 7500, // close (and replace) a connection after it has been used 7500 times (see below for discussion)
},

View File

@ -1,10 +1,9 @@
import { IsBoolean, IsDate, IsOptional } from 'class-validator';
import { IsPageRequestParam } from '../validators/is-page-request-param.validator';
import { ApiProperty } from '@nestjs/swagger';
import { IsSizeRequestParam } from '../validators/is-size-request-param.validator';
import { Transform } from 'class-transformer';
import { parseToDate } from '../util/parseToDate';
import { IsBoolean, IsOptional } from 'class-validator';
import { BooleanValues } from '../constants/boolean-values.enum';
import { IsPageRequestParam } from '../validators/is-page-request-param.validator';
import { IsSizeRequestParam } from '../validators/is-size-request-param.validator';
export class PaginationRequestGetListDto {
@ApiProperty({
@ -19,6 +18,7 @@ export class PaginationRequestGetListDto {
return value.obj.includeSpaces === BooleanValues.TRUE;
})
public includeSpaces?: boolean = false;
@IsOptional()
@IsPageRequestParam({
message: 'Page must be bigger than 0',
@ -40,40 +40,4 @@ export class PaginationRequestGetListDto {
description: 'Size request',
})
size?: number;
@IsOptional()
@ApiProperty({
name: 'name',
required: false,
description: 'Name to be filtered',
})
name?: string;
@ApiProperty({
name: 'from',
required: false,
type: Number,
description: `Start time in UNIX timestamp format to filter`,
example: 1674172800000,
})
@IsOptional()
@Transform(({ value }) => parseToDate(value))
@IsDate({
message: `From must be in UNIX timestamp format in order to parse to Date instance`,
})
from?: Date;
@ApiProperty({
name: 'to',
required: false,
type: Number,
description: `End time in UNIX timestamp format to filter`,
example: 1674259200000,
})
@IsOptional()
@Transform(({ value }) => parseToDate(value))
@IsDate({
message: `To must be in UNIX timestamp format in order to parse to Date instance`,
})
to?: Date;
}

View File

@ -3,24 +3,12 @@ import { DeviceStatusFirebaseController } from './controllers/devices-status.con
import { DeviceStatusFirebaseService } from './services/devices-status.service';
import { DeviceRepository } from '@app/common/modules/device/repositories';
import { DeviceStatusLogRepository } from '@app/common/modules/device-status-log/repositories/device-status.repository';
import { PowerClampService } from '@app/common/helper/services/power.clamp.service';
import {
PowerClampHourlyRepository,
PowerClampDailyRepository,
PowerClampMonthlyRepository,
} from '@app/common/modules/power-clamp/repositories';
import { SqlLoaderService } from '@app/common/helper/services/sql-loader.service';
@Module({
providers: [
DeviceStatusFirebaseService,
DeviceRepository,
DeviceStatusLogRepository,
PowerClampService,
PowerClampHourlyRepository,
PowerClampDailyRepository,
PowerClampMonthlyRepository,
SqlLoaderService,
],
controllers: [DeviceStatusFirebaseController],
exports: [DeviceStatusFirebaseService, DeviceStatusLogRepository],

View File

@ -13,6 +13,7 @@ class StatusDto {
@IsNotEmpty()
value: any;
t?: string | number | Date;
}
export class AddDeviceStatusDto {

View File

@ -18,17 +18,15 @@ import {
runTransaction,
} from 'firebase/database';
import { DeviceStatusLogRepository } from '@app/common/modules/device-status-log/repositories';
import { ProductType } from '@app/common/constants/product-type.enum';
import { PowerClampService } from '@app/common/helper/services/power.clamp.service';
import { PowerClampEnergyEnum } from '@app/common/constants/power.clamp.enargy.enum';
@Injectable()
export class DeviceStatusFirebaseService {
private tuya: TuyaContext;
private firebaseDb: Database;
private readonly isDevEnv: boolean;
constructor(
private readonly configService: ConfigService,
private readonly deviceRepository: DeviceRepository,
private readonly powerClampService: PowerClampService,
private deviceStatusLogRepository: DeviceStatusLogRepository,
) {
const accessKey = this.configService.get<string>('auth-config.ACCESS_KEY');
@ -42,6 +40,8 @@ export class DeviceStatusFirebaseService {
// Initialize firebaseDb using firebaseDataBase function
this.firebaseDb = firebaseDataBase(this.configService);
this.isDevEnv =
this.configService.get<string>('NODE_ENV') === 'development';
}
async addDeviceStatusByDeviceUuid(
deviceTuyaUuid: string,
@ -56,7 +56,7 @@ export class DeviceStatusFirebaseService {
const deviceStatusSaved = await this.createDeviceStatusFirebase({
deviceUuid: device.uuid,
deviceTuyaUuid: deviceTuyaUuid,
status: deviceStatus.status,
status: deviceStatus?.status,
productUuid: deviceStatus.productUuid,
productType: deviceStatus.productType,
});
@ -71,25 +71,94 @@ export class DeviceStatusFirebaseService {
);
}
}
async addBatchDeviceStatusToOurDb(
batch: {
deviceTuyaUuid: string;
status: any;
log: any;
device: any;
}[],
): Promise<void> {
const allLogs = [];
console.log(`🔁 Preparing logs from batch of ${batch.length} items...`);
for (const item of batch) {
const device = item.device;
if (!device?.uuid) {
console.log(`⛔ Skipped unknown device: ${item.deviceTuyaUuid}`);
continue;
}
const logs = item.log.properties.map((property) =>
this.deviceStatusLogRepository.create({
deviceId: device.uuid,
deviceTuyaId: item.deviceTuyaUuid,
productId: item.log.productId,
log: item.log,
code: property.code,
value: property.value,
eventId: item.log.dataId,
eventTime: new Date(property.time).toISOString(),
}),
);
allLogs.push(...logs);
}
console.log(`📝 Total logs to insert: ${allLogs.length}`);
const insertLogsPromise = (async () => {
const chunkSize = 300;
let insertedCount = 0;
for (let i = 0; i < allLogs.length; i += chunkSize) {
const chunk = allLogs.slice(i, i + chunkSize);
try {
const result = await this.deviceStatusLogRepository
.createQueryBuilder()
.insert()
.into('device-status-log') // or use DeviceStatusLogEntity
.values(chunk)
.orIgnore() // skip duplicates
.execute();
insertedCount += result.identifiers.length;
console.log(
`✅ Inserted ${result.identifiers.length} / ${chunk.length} logs (chunk)`,
);
} catch (error) {
console.error('❌ Insert error (skipped chunk):', error.message);
}
}
console.log(
`✅ Total logs inserted: ${insertedCount} / ${allLogs.length}`,
);
})();
await insertLogsPromise;
}
async addDeviceStatusToFirebase(
addDeviceStatusDto: AddDeviceStatusDto,
addDeviceStatusDto: AddDeviceStatusDto & { device?: any },
): Promise<AddDeviceStatusDto | null> {
try {
const device = await this.getDeviceByDeviceTuyaUuid(
addDeviceStatusDto.deviceTuyaUuid,
);
let device = addDeviceStatusDto.device;
if (!device) {
device = await this.getDeviceByDeviceTuyaUuid(
addDeviceStatusDto.deviceTuyaUuid,
);
}
if (device?.uuid) {
return await this.createDeviceStatusFirebase({
deviceUuid: device.uuid,
...addDeviceStatusDto,
productType: device.productDevice.prodType,
productType: device.productDevice?.prodType,
});
}
// Return null if device not found or no UUID
return null;
} catch (error) {
// Handle the error silently, perhaps log it internally or ignore it
return null;
}
}
@ -103,6 +172,15 @@ export class DeviceStatusFirebaseService {
relations: ['productDevice'],
});
}
async getAllDevices() {
return await this.deviceRepository.find({
where: {
isActive: true,
},
relations: ['productDevice'],
});
}
async getDevicesInstructionStatus(deviceUuid: string) {
try {
const deviceDetails = await this.getDeviceByDeviceUuid(deviceUuid);
@ -117,7 +195,7 @@ export class DeviceStatusFirebaseService {
return {
productUuid: deviceDetails.productDevice.uuid,
productType: deviceDetails.productDevice.prodType,
status: deviceStatus.result[0].status,
status: deviceStatus.result[0]?.status,
};
} catch (error) {
throw new HttpException(
@ -182,18 +260,18 @@ export class DeviceStatusFirebaseService {
if (!existingData.productType) {
existingData.productType = addDeviceStatusDto.productType;
}
if (!existingData.status) {
if (!existingData?.status) {
existingData.status = [];
}
// Create a map to track existing status codes
const statusMap = new Map(
existingData.status.map((item) => [item.code, item.value]),
existingData?.status.map((item) => [item.code, item.value]),
);
// Update or add status codes
for (const statusItem of addDeviceStatusDto.status) {
for (const statusItem of addDeviceStatusDto?.status) {
statusMap.set(statusItem.code, statusItem.value);
}
@ -206,40 +284,6 @@ export class DeviceStatusFirebaseService {
return existingData;
});
// Save logs to your repository
const newLogs = addDeviceStatusDto.log.properties.map((property) => {
return this.deviceStatusLogRepository.create({
deviceId: addDeviceStatusDto.deviceUuid,
deviceTuyaId: addDeviceStatusDto.deviceTuyaUuid,
productId: addDeviceStatusDto.log.productId,
log: addDeviceStatusDto.log,
code: property.code,
value: property.value,
eventId: addDeviceStatusDto.log.dataId,
eventTime: new Date(property.time).toISOString(),
});
});
await this.deviceStatusLogRepository.save(newLogs);
if (addDeviceStatusDto.productType === ProductType.PC) {
const energyCodes = new Set([
PowerClampEnergyEnum.ENERGY_CONSUMED,
PowerClampEnergyEnum.ENERGY_CONSUMED_A,
PowerClampEnergyEnum.ENERGY_CONSUMED_B,
PowerClampEnergyEnum.ENERGY_CONSUMED_C,
]);
const energyStatus = addDeviceStatusDto?.log?.properties?.find((status) =>
energyCodes.has(status.code),
);
if (energyStatus) {
await this.powerClampService.updateEnergyConsumedHistoricalData(
addDeviceStatusDto.deviceUuid,
);
}
}
// Return the updated data
const snapshot: DataSnapshot = await get(dataRef);
return snapshot.val();
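
With per-message log saving removed from createDeviceStatusFirebase, callers are expected to collect incoming messages and flush them through addBatchDeviceStatusToOurDb. A minimal usage sketch, assuming an injected service instance; the IDs below are illustrative and the log shape mirrors what the method reads above:

// Minimal sketch (inside an async method with the service injected).
// The device object is the cached DB row; IDs are illustrative only.
const batch = [
  {
    deviceTuyaUuid: 'tuya-123',
    status: [{ code: 'switch_1', value: true }],
    log: {
      productId: 'prod-abc',
      dataId: 'event-001',
      properties: [{ code: 'switch_1', value: true, time: Date.now() }],
    },
    device: { uuid: 'device-db-uuid' },
  },
];
await this.deviceStatusFirebaseService.addBatchDeviceStatusToOurDb(batch);
// Rows are built per log property, inserted in chunks of 300, and
// duplicates are skipped by the orIgnore() on the insert query.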

View File

@ -0,0 +1,38 @@
export function toDDMMYYYY(dateString?: string | null): string | null {
if (!dateString) return null;
// Ensure dateString is in the expected YYYY-MM-DD format
const regex = /^\d{4}-\d{2}-\d{2}$/;
if (!regex.test(dateString)) {
throw new Error(
`Invalid date format: ${dateString}. Expected format is YYYY-MM-DD`,
);
}
const [year, month, day] = dateString.split('-');
return `${day}-${month}-${year}`;
}
export function toMMYYYY(dateString?: string | null): string | null {
if (!dateString) return null;
// Ensure dateString is in the expected YYYY-MM format
const regex = /^\d{4}-\d{2}$/;
if (!regex.test(dateString)) {
throw new Error(
`Invalid date format: ${dateString}. Expected format is YYYY-MM`,
);
}
const [year, month] = dateString.split('-');
return `${month}-${year}`;
}
export function filterByMonth(data: any[], monthDate: string) {
const [year, month] = monthDate.split('-').map(Number);
return data.filter((item) => {
const itemDate = new Date(item.date);
return (
itemDate.getUTCFullYear() === year && itemDate.getUTCMonth() + 1 === month
);
});
}
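
A quick usage sketch for the three helpers above; sample values are illustrative:

toDDMMYYYY('2025-07-09'); // '09-07-2025'
toMMYYYY('2025-07');      // '07-2025'
toDDMMYYYY(null);         // null
// toDDMMYYYY('09/07/2025'); // throws: Invalid date format ... Expected format is YYYY-MM-DD

const readings = [
  { date: '2025-07-01T10:00:00Z', value: 42 },
  { date: '2025-06-30T23:00:00Z', value: 7 },
];
filterByMonth(readings, '2025-07'); // keeps only the July 2025 entry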

View File

@ -8,7 +8,10 @@ import { TuyaWebSocketService } from './services/tuya.web.socket.service';
import { OneSignalService } from './services/onesignal.service';
import { DeviceMessagesService } from './services/device.messages.service';
import { DeviceRepositoryModule } from '../modules/device/device.repository.module';
import { DeviceNotificationRepository } from '../modules/device/repositories';
import {
DeviceNotificationRepository,
DeviceRepository,
} from '../modules/device/repositories';
import { DeviceStatusFirebaseModule } from '../firebase/devices-status/devices-status.module';
import { CommunityPermissionService } from './services/community.permission.service';
import { CommunityRepository } from '../modules/community/repositories';
@ -27,6 +30,7 @@ import { SosHandlerService } from './services/sos.handler.service';
DeviceNotificationRepository,
CommunityRepository,
SosHandlerService,
DeviceRepository,
],
exports: [
HelperHashService,

View File

@ -0,0 +1,66 @@
import { Injectable } from '@nestjs/common';
import { DataSource } from 'typeorm';
import { SQL_PROCEDURES_PATH } from '@app/common/constants/sql-query-path';
import { SqlLoaderService } from './sql-loader.service';
@Injectable()
export class AqiDataService {
constructor(
private readonly sqlLoader: SqlLoaderService,
private readonly dataSource: DataSource,
) {}
async updateAQISensorHistoricalData(): Promise<void> {
try {
const { dateStr } = this.getFormattedDates();
// Execute the update procedure (wrapped in Promise.all, currently a single item)
await Promise.all([
this.executeProcedureWithRetry(
'proceduce_update_daily_space_aqi',
[dateStr],
'fact_daily_space_aqi',
),
]);
} catch (err) {
console.error('Failed to update AQI sensor historical data:', err);
throw err;
}
}
private getFormattedDates(): { dateStr: string } {
const now = new Date();
return {
dateStr: now.toLocaleDateString('en-CA'), // YYYY-MM-DD
};
}
private async executeProcedureWithRetry(
procedureFileName: string,
params: (string | number | null)[],
folderName: string,
retries = 3,
): Promise<void> {
try {
const query = this.loadQuery(folderName, procedureFileName);
await this.dataSource.query(query, params);
console.log(`Procedure ${procedureFileName} executed successfully.`);
} catch (err) {
if (retries > 0) {
const delayMs = 1000 * (4 - retries); // Linear backoff: 1s, 2s, 3s
console.warn(`Retrying ${procedureFileName} (${retries} retries left)`);
await new Promise((resolve) => setTimeout(resolve, delayMs));
return this.executeProcedureWithRetry(
procedureFileName,
params,
folderName,
retries - 1,
);
}
console.error(`Failed to execute ${procedureFileName}:`, err);
throw err;
}
}
private loadQuery(folderName: string, fileName: string): string {
return this.sqlLoader.loadQuery(folderName, fileName, SQL_PROCEDURES_PATH);
}
}
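
executeProcedureWithRetry re-runs a failed procedure up to three times, waiting 1s, 2s, then 3s between attempts before rethrowing. A hedged sketch of how the service might be driven by a daily cron job; whether the project actually uses @nestjs/schedule for this is an assumption, as is the scheduler class name:

import { Injectable } from '@nestjs/common';
import { Cron, CronExpression } from '@nestjs/schedule';
import { AqiDataService } from './aqi-data.service'; // assumed file path

@Injectable()
export class AqiDataScheduler {
  constructor(private readonly aqiDataService: AqiDataService) {}

  @Cron(CronExpression.EVERY_DAY_AT_MIDNIGHT)
  async handleDailyAqiRollup(): Promise<void> {
    // A failing procedure is retried 3 times (1s/2s/3s delays) inside the
    // service before the error is rethrown and surfaces here.
    await this.aqiDataService.updateAQISensorHistoricalData();
  }
}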

View File

@ -0,0 +1,71 @@
import { Injectable } from '@nestjs/common';
import { DataSource } from 'typeorm';
import { SQL_PROCEDURES_PATH } from '@app/common/constants/sql-query-path';
import { SqlLoaderService } from './sql-loader.service';
@Injectable()
export class OccupancyService {
constructor(
private readonly sqlLoader: SqlLoaderService,
private readonly dataSource: DataSource,
) {}
async updateOccupancyDataProcedures(): Promise<void> {
try {
const { dateStr } = this.getFormattedDates();
// Execute all procedures in parallel
await Promise.all([
this.executeProcedureWithRetry(
'procedure_update_fact_space_occupancy',
[dateStr],
'fact_space_occupancy_count',
),
this.executeProcedureWithRetry(
'procedure_update_daily_space_occupancy_duration',
[dateStr],
'fact_daily_space_occupancy_duration',
),
]);
} catch (err) {
console.error('Failed to update occupancy data:', err);
throw err;
}
}
private getFormattedDates(): { dateStr: string } {
const now = new Date();
return {
dateStr: now.toLocaleDateString('en-CA'), // YYYY-MM-DD
};
}
private async executeProcedureWithRetry(
procedureFileName: string,
params: (string | number | null)[],
folderName: string,
retries = 3,
): Promise<void> {
try {
const query = this.loadQuery(folderName, procedureFileName);
await this.dataSource.query(query, params);
console.log(`Procedure ${procedureFileName} executed successfully.`);
} catch (err) {
if (retries > 0) {
const delayMs = 1000 * (4 - retries); // Linear backoff: 1s, 2s, 3s
console.warn(`Retrying ${procedureFileName} (${retries} retries left)`);
await new Promise((resolve) => setTimeout(resolve, delayMs));
return this.executeProcedureWithRetry(
procedureFileName,
params,
folderName,
retries - 1,
);
}
console.error(`Failed to execute ${procedureFileName}:`, err);
throw err;
}
}
private loadQuery(folderName: string, fileName: string): string {
return this.sqlLoader.loadQuery(folderName, fileName, SQL_PROCEDURES_PATH);
}
}

View File

@ -1,7 +1,7 @@
import { Injectable } from '@nestjs/common';
import { SqlLoaderService } from './sql-loader.service';
import { DataSource } from 'typeorm';
import { SQL_PROCEDURES_PATH } from '@app/common/constants/sql-query-path';
import { SqlLoaderService } from './sql-loader.service';
@Injectable()
export class PowerClampService {
@ -10,52 +10,75 @@ export class PowerClampService {
private readonly dataSource: DataSource,
) {}
async updateEnergyConsumedHistoricalData(deviceUuid: string): Promise<void> {
async updateEnergyConsumedHistoricalData(): Promise<void> {
try {
const now = new Date();
const dateStr = now.toLocaleDateString('en-CA'); // YYYY-MM-DD
const hour = now.getHours();
const monthYear = now
.toLocaleDateString('en-US', {
month: '2-digit',
year: 'numeric',
})
.replace('/', '-'); // MM-YYYY
const { dateStr, monthYear } = this.getFormattedDates();
await this.executeProcedure(
'fact_hourly_device_energy_consumed_procedure',
[deviceUuid, dateStr, hour],
);
await this.executeProcedure(
'fact_daily_device_energy_consumed_procedure',
[deviceUuid, dateStr],
);
await this.executeProcedure(
'fact_monthly_device_energy_consumed_procedure',
[deviceUuid, monthYear],
);
// Execute all procedures in parallel
await Promise.all([
this.executeProcedureWithRetry(
'fact_hourly_device_energy_consumed_procedure',
[dateStr],
'fact_device_energy_consumed',
),
this.executeProcedureWithRetry(
'fact_daily_device_energy_consumed_procedure',
[dateStr],
'fact_device_energy_consumed',
),
this.executeProcedureWithRetry(
'fact_monthly_device_energy_consumed_procedure',
[monthYear],
'fact_device_energy_consumed',
),
]);
} catch (err) {
console.error('Failed to insert or update energy data:', err);
console.error('Failed to update energy consumption data:', err);
throw err;
}
}
private async executeProcedure(
procedureFileName: string,
params: (string | number | null)[],
): Promise<void> {
const query = this.loadQuery(procedureFileName);
await this.dataSource.query(query, params);
console.log(`Procedure ${procedureFileName} executed successfully.`);
private getFormattedDates(): { dateStr: string; monthYear: string } {
const now = new Date();
return {
dateStr: now.toLocaleDateString('en-CA'), // YYYY-MM-DD
monthYear: now
.toLocaleDateString('en-US', {
month: '2-digit',
year: 'numeric',
})
.replace('/', '-'), // MM-YYYY
};
}
private loadQuery(fileName: string): string {
return this.sqlLoader.loadQuery(
'fact_device_energy_consumed',
fileName,
SQL_PROCEDURES_PATH,
);
private async executeProcedureWithRetry(
procedureFileName: string,
params: (string | number | null)[],
folderName: string,
retries = 3,
): Promise<void> {
try {
const query = this.loadQuery(folderName, procedureFileName);
await this.dataSource.query(query, params);
console.log(`Procedure ${procedureFileName} executed successfully.`);
} catch (err) {
if (retries > 0) {
const delayMs = 1000 * (4 - retries); // Linear backoff: 1s, 2s, 3s
console.warn(`Retrying ${procedureFileName} (${retries} retries left)`);
await new Promise((resolve) => setTimeout(resolve, delayMs));
return this.executeProcedureWithRetry(
procedureFileName,
params,
folderName,
retries - 1,
);
}
console.error(`Failed to execute ${procedureFileName}:`, err);
throw err;
}
}
private loadQuery(folderName: string, fileName: string): string {
return this.sqlLoader.loadQuery(folderName, fileName, SQL_PROCEDURES_PATH);
}
}

View File

@ -16,21 +16,46 @@ export class SosHandlerService {
);
}
async handleSosEvent(devId: string, logData: any): Promise<void> {
async handleSosEventFirebase(device: any, logData: any): Promise<void> {
const sosTrueStatus = [{ code: 'sos', value: true }];
const sosFalseStatus = [{ code: 'sos', value: false }];
try {
// ✅ Send true status
await this.deviceStatusFirebaseService.addDeviceStatusToFirebase({
deviceTuyaUuid: devId,
status: [{ code: 'sos', value: true }],
deviceTuyaUuid: device.deviceTuyaUuid,
status: sosTrueStatus,
log: logData,
device,
});
await this.deviceStatusFirebaseService.addBatchDeviceStatusToOurDb([
{
deviceTuyaUuid: device.deviceTuyaUuid,
status: sosTrueStatus,
log: logData,
device,
},
]);
// ✅ Schedule false status
setTimeout(async () => {
try {
await this.deviceStatusFirebaseService.addDeviceStatusToFirebase({
deviceTuyaUuid: devId,
status: [{ code: 'sos', value: false }],
deviceTuyaUuid: device.deviceTuyaUuid,
status: sosFalseStatus,
log: logData,
device,
});
await this.deviceStatusFirebaseService.addBatchDeviceStatusToOurDb([
{
deviceTuyaUuid: device.deviceTuyaUuid,
status: sosFalseStatus,
log: logData,
device,
},
]);
} catch (err) {
this.logger.error('Failed to send SOS false value', err);
}

View File

@ -1,13 +1,24 @@
import { Injectable } from '@nestjs/common';
import { Injectable, OnModuleInit } from '@nestjs/common';
import TuyaWebsocket from '../../config/tuya-web-socket-config';
import { ConfigService } from '@nestjs/config';
import { DeviceStatusFirebaseService } from '@app/common/firebase/devices-status/services/devices-status.service';
import { SosHandlerService } from './sos.handler.service';
import * as NodeCache from 'node-cache';
@Injectable()
export class TuyaWebSocketService {
export class TuyaWebSocketService implements OnModuleInit {
private client: any;
private readonly isDevEnv: boolean;
private readonly deviceCache = new NodeCache({ stdTTL: 7200 }); // TTL = 2 hours
private messageQueue: {
devId: string;
status: any;
logData: any;
device: any;
}[] = [];
private isProcessing = false;
constructor(
private readonly configService: ConfigService,
@ -26,16 +37,36 @@ export class TuyaWebSocketService {
});
if (this.configService.get<string>('tuya-config.TRUN_ON_TUYA_SOCKET')) {
// Set up event handlers
this.setupEventHandlers();
// Start receiving messages
this.client.start();
}
// Run the queue processor every 15 seconds
setInterval(() => this.processQueue(), 15000);
// Refresh the device cache every 30 minutes
setInterval(() => this.initializeDeviceCache(), 30 * 60 * 1000);
}
async onModuleInit() {
await this.initializeDeviceCache();
}
private async initializeDeviceCache() {
try {
const allDevices = await this.deviceStatusFirebaseService.getAllDevices();
allDevices.forEach((device) => {
if (device.deviceTuyaUuid) {
this.deviceCache.set(device.deviceTuyaUuid, device);
}
});
console.log(`✅ Refreshed cache with ${allDevices.length} devices.`);
} catch (error) {
console.error('❌ Failed to initialize device cache:', error);
}
}
private setupEventHandlers() {
// Event handlers
this.client.open(() => {
console.log('open');
});
@ -43,23 +74,38 @@ export class TuyaWebSocketService {
this.client.message(async (ws: WebSocket, message: any) => {
try {
const { devId, status, logData } = this.extractMessageData(message);
if (!Array.isArray(logData?.properties)) {
this.client.ackMessage(message.messageId);
return;
}
const device = this.deviceCache.get(devId);
if (!device) {
// console.log(`⛔ Unknown device: ${devId}, message ignored.`);
this.client.ackMessage(message.messageId);
return;
}
if (this.sosHandlerService.isSosTriggered(status)) {
await this.sosHandlerService.handleSosEvent(devId, logData);
// Pass the cached device row; handleSosEventFirebase reads device.deviceTuyaUuid
await this.sosHandlerService.handleSosEventFirebase(device, logData);
} else {
await this.deviceStatusFirebaseService.addDeviceStatusToFirebase({
deviceTuyaUuid: devId,
status: status,
status,
log: logData,
device,
});
}
// Push to internal queue
this.messageQueue.push({ devId, status, logData, device });
// Acknowledge the message
this.client.ackMessage(message.messageId);
} catch (error) {
console.error('Error processing message:', error);
console.error('Error receiving message:', error);
}
});
this.client.reconnect(() => {
console.log('reconnect');
});
@ -80,6 +126,37 @@ export class TuyaWebSocketService {
console.error('WebSocket error:', error);
});
}
private async processQueue() {
if (this.isProcessing) {
console.log('⏳ Skipping: still processing previous batch');
return;
}
if (this.messageQueue.length === 0) return;
this.isProcessing = true;
const batch = [...this.messageQueue];
this.messageQueue = [];
console.log(`🔁 Processing batch of size: ${batch.length}`);
try {
await this.deviceStatusFirebaseService.addBatchDeviceStatusToOurDb(
batch.map((item) => ({
deviceTuyaUuid: item.devId,
status: item.status,
log: item.logData,
device: item.device,
})),
);
} catch (error) {
console.error('❌ Error processing batch:', error);
this.messageQueue.unshift(...batch); // retry
} finally {
this.isProcessing = false;
}
}
private extractMessageData(message: any): {
devId: string;
status: any;

View File

@ -0,0 +1,5 @@
// Convert a time string (HH:mm) to minutes since midnight
export function timeToMinutes(time: string): number {
const [hours, minutes] = time.split(':').map(Number);
return hours * 60 + minutes;
}
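
A short usage sketch; the slot comparison below is illustrative, not taken from the booking service itself:

timeToMinutes('09:30'); // 570
timeToMinutes('00:05'); // 5

// Comparing HH:mm values as plain minutes, e.g. for availability checks.
const slotStart = timeToMinutes('09:00'); // 540
const slotEnd = timeToMinutes('10:30');   // 630
const requested = timeToMinutes('09:45'); // 585
const fitsInSlot = requested >= slotStart && requested < slotEnd; // true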

View File

@ -49,12 +49,12 @@ export class TuyaService {
path,
});
if (!response.success) {
throw new HttpException(
`Error fetching device details: ${response.msg}`,
HttpStatus.BAD_REQUEST,
);
}
// if (!response.success) {
// throw new HttpException(
// `Error fetching device details: ${response.msg}`,
// HttpStatus.BAD_REQUEST,
// );
// }
return response.result;
}

View File

@ -1,43 +1,26 @@
import { utilities as nestWinstonModuleUtilities } from 'nest-winston';
import * as winston from 'winston';
const environment = process.env.NODE_ENV || 'local';
export const winstonLoggerOptions: winston.LoggerOptions = {
level:
environment === 'local'
? 'debug'
: environment === 'development'
? 'warn'
: 'error',
process.env.AZURE_POSTGRESQL_DATABASE === 'development' ? 'debug' : 'error',
transports: [
new winston.transports.Console({
level:
environment === 'local'
? 'debug'
: environment === 'development'
? 'warn'
: 'error',
format: winston.format.combine(
winston.format.timestamp(),
nestWinstonModuleUtilities.format.nestLike('MyApp', {
prettyPrint: environment === 'local',
prettyPrint: true,
}),
),
}),
// Only create file logs if NOT local
...(environment !== 'local'
? [
new winston.transports.File({
filename: 'logs/error.log',
level: 'error',
format: winston.format.json(),
}),
new winston.transports.File({
filename: 'logs/combined.log',
level: 'info',
format: winston.format.json(),
}),
]
: []),
new winston.transports.File({
filename: 'logs/error.log',
level: 'error',
format: winston.format.json(),
}),
new winston.transports.File({
filename: 'logs/combined.log',
format: winston.format.json(),
}),
],
};

View File

@ -8,14 +8,14 @@ import {
Unique,
} from 'typeorm';
import { AbstractEntity } from '../../abstract/entities/abstract.entity';
import { RoleTypeEntity } from '../../role-type/entities';
import { UserStatusEnum } from '@app/common/constants/user-status.enum';
import { UserEntity } from '../../user/entities';
import { RoleType } from '@app/common/constants/role.type.enum';
import { InviteUserDto, InviteUserSpaceDto } from '../dtos';
import { UserStatusEnum } from '@app/common/constants/user-status.enum';
import { AbstractEntity } from '../../abstract/entities/abstract.entity';
import { ProjectEntity } from '../../project/entities';
import { RoleTypeEntity } from '../../role-type/entities';
import { SpaceEntity } from '../../space/entities/space.entity';
import { UserEntity } from '../../user/entities';
import { InviteUserDto, InviteUserSpaceDto } from '../dtos';
@Entity({ name: 'invite-user' })
@Unique(['email', 'project'])
@ -82,7 +82,10 @@ export class InviteUserEntity extends AbstractEntity<InviteUserDto> {
onDelete: 'CASCADE',
})
public roleType: RoleTypeEntity;
@OneToOne(() => UserEntity, (user) => user.inviteUser, { nullable: true })
@OneToOne(() => UserEntity, (user) => user.inviteUser, {
nullable: true,
onDelete: 'CASCADE',
})
@JoinColumn({ name: 'user_uuid' })
user: UserEntity;
@OneToMany(
@ -112,7 +115,9 @@ export class InviteUserSpaceEntity extends AbstractEntity<InviteUserSpaceDto> {
})
public uuid: string;
@ManyToOne(() => InviteUserEntity, (inviteUser) => inviteUser.spaces)
@ManyToOne(() => InviteUserEntity, (inviteUser) => inviteUser.spaces, {
onDelete: 'CASCADE',
})
@JoinColumn({ name: 'invite_user_uuid' })
public inviteUser: InviteUserEntity;

View File

@ -0,0 +1,11 @@
import { Module } from '@nestjs/common';
import { TypeOrmModule } from '@nestjs/typeorm';
import { AqiSpaceDailyPollutantStatsEntity } from './entities/aqi.entity';
@Module({
providers: [],
exports: [],
controllers: [],
imports: [TypeOrmModule.forFeature([AqiSpaceDailyPollutantStatsEntity])],
})
export class AqiRepositoryModule {}

View File

@ -0,0 +1,82 @@
import { IsNotEmpty, IsNumber, IsString } from 'class-validator';
export class AqiSpaceDailyPollutantStatsDto {
@IsString()
@IsNotEmpty()
public uuid: string;
@IsNotEmpty()
@IsString()
spaceUuid: string;
@IsNotEmpty()
@IsString()
eventDay: string;
@IsNotEmpty()
@IsNumber()
eventHour: number;
@IsNumber()
pm1Min: number;
@IsNumber()
pm1Avg: number;
@IsNumber()
pm1Max: number;
@IsNumber()
pm10Min: number;
@IsNumber()
pm10Avg: number;
@IsNumber()
pm10Max: number;
@IsNumber()
pm25Min: number;
@IsNumber()
pm25Avg: number;
@IsNumber()
pm25Max: number;
@IsNumber()
ch2oMin: number;
@IsNumber()
ch2oAvg: number;
@IsNumber()
ch2oMax: number;
@IsNumber()
vocMin: number;
@IsNumber()
vocAvg: number;
@IsNumber()
vocMax: number;
@IsNumber()
co2Min: number;
@IsNumber()
co2Avg: number;
@IsNumber()
co2Max: number;
@IsNumber()
aqiMin: number;
@IsNumber()
aqiAvg: number;
@IsNumber()
aqiMax: number;
}

View File

@ -0,0 +1 @@
export * from './aqi.dto';

View File

@ -0,0 +1,184 @@
import { Column, Entity, ManyToOne, Unique } from 'typeorm';
import { AbstractEntity } from '../../abstract/entities/abstract.entity';
import { SpaceEntity } from '../../space/entities/space.entity';
import { AqiSpaceDailyPollutantStatsDto } from '../dtos';
@Entity({ name: 'space-daily-pollutant-stats' })
@Unique(['spaceUuid', 'eventDate'])
export class AqiSpaceDailyPollutantStatsEntity extends AbstractEntity<AqiSpaceDailyPollutantStatsDto> {
@Column({ nullable: false })
public spaceUuid: string;
@ManyToOne(() => SpaceEntity, (space) => space.aqiSensorDaily)
space: SpaceEntity;
@Column({ type: 'date', nullable: false })
public eventDate: Date;
@Column('float', { nullable: true })
public goodAqiPercentage?: number;
@Column('float', { nullable: true })
public moderateAqiPercentage?: number;
@Column('float', { nullable: true })
public unhealthySensitiveAqiPercentage?: number;
@Column('float', { nullable: true })
public unhealthyAqiPercentage?: number;
@Column('float', { nullable: true })
public veryUnhealthyAqiPercentage?: number;
@Column('float', { nullable: true })
public hazardousAqiPercentage?: number;
@Column('float', { nullable: true })
public dailyAvgAqi?: number;
@Column('float', { nullable: true })
public dailyMaxAqi?: number;
@Column('float', { nullable: true })
public dailyMinAqi?: number;
@Column('float', { nullable: true })
public goodPm25Percentage?: number;
@Column('float', { nullable: true })
public moderatePm25Percentage?: number;
@Column('float', { nullable: true })
public unhealthySensitivePm25Percentage?: number;
@Column('float', { nullable: true })
public unhealthyPm25Percentage?: number;
@Column('float', { nullable: true })
public veryUnhealthyPm25Percentage?: number;
@Column('float', { nullable: true })
public hazardousPm25Percentage?: number;
@Column('float', { nullable: true })
public dailyAvgPm25?: number;
@Column('float', { nullable: true })
public dailyMaxPm25?: number;
@Column('float', { nullable: true })
public dailyMinPm25?: number;
@Column('float', { nullable: true })
public goodPm10Percentage?: number;
@Column('float', { nullable: true })
public moderatePm10Percentage?: number;
@Column('float', { nullable: true })
public unhealthySensitivePm10Percentage?: number;
@Column('float', { nullable: true })
public unhealthyPm10Percentage?: number;
@Column('float', { nullable: true })
public veryUnhealthyPm10Percentage?: number;
@Column('float', { nullable: true })
public hazardousPm10Percentage?: number;
@Column('float', { nullable: true })
public dailyAvgPm10?: number;
@Column('float', { nullable: true })
public dailyMaxPm10?: number;
@Column('float', { nullable: true })
public dailyMinPm10?: number;
@Column('float', { nullable: true })
public goodVocPercentage?: number;
@Column('float', { nullable: true })
public moderateVocPercentage?: number;
@Column('float', { nullable: true })
public unhealthySensitiveVocPercentage?: number;
@Column('float', { nullable: true })
public unhealthyVocPercentage?: number;
@Column('float', { nullable: true })
public veryUnhealthyVocPercentage?: number;
@Column('float', { nullable: true })
public hazardousVocPercentage?: number;
@Column('float', { nullable: true })
public dailyAvgVoc?: number;
@Column('float', { nullable: true })
public dailyMaxVoc?: number;
@Column('float', { nullable: true })
public dailyMinVoc?: number;
@Column('float', { nullable: true })
public goodCo2Percentage?: number;
@Column('float', { nullable: true })
public moderateCo2Percentage?: number;
@Column('float', { nullable: true })
public unhealthySensitiveCo2Percentage?: number;
@Column('float', { nullable: true })
public unhealthyCo2Percentage?: number;
@Column('float', { nullable: true })
public veryUnhealthyCo2Percentage?: number;
@Column('float', { nullable: true })
public hazardousCo2Percentage?: number;
@Column('float', { nullable: true })
public dailyAvgCo2?: number;
@Column('float', { nullable: true })
public dailyMaxCo2?: number;
@Column('float', { nullable: true })
public dailyMinCo2?: number;
@Column('float', { nullable: true })
public goodCh2oPercentage?: number;
@Column('float', { nullable: true })
public moderateCh2oPercentage?: number;
@Column('float', { nullable: true })
public unhealthySensitiveCh2oPercentage?: number;
@Column('float', { nullable: true })
public unhealthyCh2oPercentage?: number;
@Column('float', { nullable: true })
public veryUnhealthyCh2oPercentage?: number;
@Column('float', { nullable: true })
public hazardousCh2oPercentage?: number;
@Column('float', { nullable: true })
public dailyAvgCh2o?: number;
@Column('float', { nullable: true })
public dailyMaxCh2o?: number;
@Column('float', { nullable: true })
public dailyMinCh2o?: number;
constructor(partial: Partial<AqiSpaceDailyPollutantStatsEntity>) {
super();
Object.assign(this, partial);
}
}

View File

@ -0,0 +1 @@
export * from './aqi.entity';

View File

@ -0,0 +1,10 @@
import { DataSource, Repository } from 'typeorm';
import { Injectable } from '@nestjs/common';
import { AqiSpaceDailyPollutantStatsEntity } from '../entities';
@Injectable()
export class AqiSpaceDailyPollutantStatsRepository extends Repository<AqiSpaceDailyPollutantStatsEntity> {
constructor(private dataSource: DataSource) {
super(AqiSpaceDailyPollutantStatsEntity, dataSource.createEntityManager());
}
}

View File

@ -0,0 +1 @@
export * from './aqi.repository';

View File

@ -0,0 +1,11 @@
import { Module } from '@nestjs/common';
import { TypeOrmModule } from '@nestjs/typeorm';
import { BookableSpaceEntity } from './entities/bookable-space.entity';
@Module({
providers: [],
exports: [],
controllers: [],
imports: [TypeOrmModule.forFeature([BookableSpaceEntity])],
})
export class BookableRepositoryModule {}

View File

@ -0,0 +1,51 @@
import { DaysEnum } from '@app/common/constants/days.enum';
import {
Column,
CreateDateColumn,
Entity,
JoinColumn,
OneToOne,
UpdateDateColumn,
} from 'typeorm';
import { AbstractEntity } from '../../abstract/entities/abstract.entity';
import { SpaceEntity } from '../../space/entities/space.entity';
@Entity('bookable-space')
export class BookableSpaceEntity extends AbstractEntity {
@Column({
type: 'uuid',
default: () => 'gen_random_uuid()',
nullable: false,
})
public uuid: string;
@OneToOne(() => SpaceEntity, (space) => space.bookableConfig)
@JoinColumn({ name: 'space_uuid' })
space: SpaceEntity;
@Column({
type: 'enum',
enum: DaysEnum,
array: true,
nullable: false,
})
daysAvailable: DaysEnum[];
@Column({ type: 'time' })
startTime: string;
@Column({ type: 'time' })
endTime: string;
@Column({ type: Boolean, default: true })
active: boolean;
@Column({ type: 'int', default: null })
points?: number;
@CreateDateColumn()
createdAt: Date;
@UpdateDateColumn()
updatedAt: Date;
}
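
A hedged sketch of creating a bookable-space configuration through a TypeORM repository; the DaysEnum member names, the space UUID, and the repository variable are assumptions for illustration only:

// Assumes an injected Repository<BookableSpaceEntity> and an existing space row.
const config = bookableSpaceRepository.create({
  space: { uuid: 'space-uuid' } as SpaceEntity,       // assumed existing space
  daysAvailable: [DaysEnum.Monday, DaysEnum.Tuesday], // assumed enum member names
  startTime: '09:00',
  endTime: '17:00',
  active: true,
  points: 10,
});
await bookableSpaceRepository.save(config);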

View File

@ -0,0 +1 @@
export * from './bookable-space.entity';

View File

@ -0,0 +1,10 @@
import { DataSource, Repository } from 'typeorm';
import { Injectable } from '@nestjs/common';
import { BookableSpaceEntity } from '../entities/bookable-space.entity';
@Injectable()
export class BookableSpaceEntityRepository extends Repository<BookableSpaceEntity> {
constructor(private dataSource: DataSource) {
super(BookableSpaceEntity, dataSource.createEntityManager());
}
}

View File

@ -0,0 +1 @@
export * from './booking.repository';

View File

@ -2,15 +2,15 @@ import { SourceType } from '@app/common/constants/source-type.enum';
import { Entity, Column, PrimaryColumn, Unique } from 'typeorm';
@Entity('device-status-log')
@Unique('event_time_idx', ['eventTime'])
@Unique('event_time_idx', ['eventTime', 'deviceId', 'code', 'value'])
export class DeviceStatusLogEntity {
@Column({ type: 'int', generated: true, unsigned: true })
@PrimaryColumn({ type: 'int', generated: true, unsigned: true })
id: number;
@Column({ type: 'text' })
eventId: string;
@PrimaryColumn({ type: 'timestamptz' })
@Column({ type: 'timestamptz' })
eventTime: Date;
@Column({

View File

@ -1,23 +1,24 @@
import {
Column,
Entity,
Index,
JoinColumn,
ManyToOne,
OneToMany,
Unique,
Index,
JoinColumn,
} from 'typeorm';
import { AbstractEntity } from '../../abstract/entities/abstract.entity';
import { DeviceDto, DeviceUserPermissionDto } from '../dtos/device.dto';
import { ProductEntity } from '../../product/entities';
import { UserEntity } from '../../user/entities';
import { DeviceNotificationDto } from '../dtos';
import { PermissionTypeEntity } from '../../permission/entities';
import { PowerClampHourlyEntity } from '../../power-clamp/entities/power-clamp.entity';
import { PresenceSensorDailyDeviceEntity } from '../../presence-sensor/entities';
import { ProductEntity } from '../../product/entities';
import { SceneDeviceEntity } from '../../scene-device/entities';
import { SpaceEntity } from '../../space/entities/space.entity';
import { SubspaceEntity } from '../../space/entities/subspace/subspace.entity';
import { NewTagEntity } from '../../tag';
import { PowerClampHourlyEntity } from '../../power-clamp/entities/power-clamp.entity';
import { UserEntity } from '../../user/entities';
import { DeviceNotificationDto } from '../dtos';
import { DeviceDto, DeviceUserPermissionDto } from '../dtos/device.dto';
@Entity({ name: 'device' })
@Unique(['deviceTuyaUuid'])
@ -27,6 +28,11 @@ export class DeviceEntity extends AbstractEntity<DeviceDto> {
})
deviceTuyaUuid: string;
@Column({
nullable: true,
})
deviceTuyaConstUuid: string;
@Column({
nullable: true,
default: true,
@ -77,11 +83,13 @@ export class DeviceEntity extends AbstractEntity<DeviceDto> {
@OneToMany(() => SceneDeviceEntity, (sceneDevice) => sceneDevice.device, {})
sceneDevices: SceneDeviceEntity[];
@OneToMany(() => NewTagEntity, (tag) => tag.devices)
// @JoinTable({ name: 'device_tags' })
@ManyToOne(() => NewTagEntity, (tag) => tag.devices)
@JoinColumn({ name: 'tag_uuid' })
public tag: NewTagEntity;
@OneToMany(() => PowerClampHourlyEntity, (powerClamp) => powerClamp.device)
powerClampHourly: PowerClampHourlyEntity[];
@OneToMany(() => PresenceSensorDailyDeviceEntity, (sensor) => sensor.device)
presenceSensorDaily: PresenceSensorDailyDeviceEntity[];
constructor(partial: Partial<DeviceEntity>) {
super();
Object.assign(this, partial);
@ -108,6 +116,7 @@ export class DeviceNotificationEntity extends AbstractEntity<DeviceNotificationD
@ManyToOne(() => UserEntity, (user) => user.userPermission, {
nullable: false,
onDelete: 'CASCADE',
})
user: UserEntity;
@ -146,6 +155,7 @@ export class DeviceUserPermissionEntity extends AbstractEntity<DeviceUserPermiss
@ManyToOne(() => UserEntity, (user) => user.userPermission, {
nullable: false,
onDelete: 'CASCADE',
})
user: UserEntity;
constructor(partial: Partial<DeviceUserPermissionEntity>) {

View File

@ -0,0 +1 @@
export * from './occupancy.dto';

View File

@ -0,0 +1,23 @@
import { IsNotEmpty, IsNumber, IsString } from 'class-validator';
export class SpaceDailyOccupancyDurationDto {
@IsString()
@IsNotEmpty()
public uuid: string;
@IsString()
@IsNotEmpty()
public spaceUuid: string;
@IsString()
@IsNotEmpty()
public eventDate: string;
@IsNumber()
@IsNotEmpty()
public occupancyPercentage: number;
@IsNumber()
@IsNotEmpty()
public occupiedSeconds: number;
}

View File

@ -0,0 +1 @@
export * from './occupancy.entity';

View File

@ -0,0 +1,32 @@
import { Column, Entity, ManyToOne, Unique } from 'typeorm';
import { AbstractEntity } from '../../abstract/entities/abstract.entity';
import { SpaceEntity } from '../../space/entities/space.entity';
import { SpaceDailyOccupancyDurationDto } from '../dtos';
@Entity({ name: 'space-daily-occupancy-duration' })
@Unique(['spaceUuid', 'eventDate'])
export class SpaceDailyOccupancyDurationEntity extends AbstractEntity<SpaceDailyOccupancyDurationDto> {
@Column({ nullable: false })
public spaceUuid: string;
@Column({ nullable: false, type: 'date' })
public eventDate: string;
public CountTotalPresenceDetected: number;
@ManyToOne(() => SpaceEntity, (space) => space.presenceSensorDaily)
space: SpaceEntity;
@Column({ type: 'int' })
occupancyPercentage: number;
@Column({ type: 'int', nullable: true })
occupiedSeconds?: number;
@Column({ type: 'int', nullable: true })
deviceCount?: number;
constructor(partial: Partial<SpaceDailyOccupancyDurationEntity>) {
super();
Object.assign(this, partial);
}
}

View File

@ -0,0 +1,11 @@
import { Module } from '@nestjs/common';
import { TypeOrmModule } from '@nestjs/typeorm';
import { SpaceDailyOccupancyDurationEntity } from './entities/occupancy.entity';
@Module({
providers: [],
exports: [],
controllers: [],
imports: [TypeOrmModule.forFeature([SpaceDailyOccupancyDurationEntity])],
})
export class SpaceDailyOccupancyDurationRepositoryModule {}

View File

@ -0,0 +1 @@
export * from './occupancy.repository';

View File

@ -0,0 +1,10 @@
import { DataSource, Repository } from 'typeorm';
import { Injectable } from '@nestjs/common';
import { SpaceDailyOccupancyDurationEntity } from '../entities/occupancy.entity';
@Injectable()
export class SpaceDailyOccupancyDurationEntityRepository extends Repository<SpaceDailyOccupancyDurationEntity> {
constructor(private dataSource: DataSource) {
super(SpaceDailyOccupancyDurationEntity, dataSource.createEntityManager());
}
}

View File

@ -0,0 +1 @@
export * from './presence-sensor.dto';

View File

@ -0,0 +1,27 @@
import { IsNotEmpty, IsNumber, IsString } from 'class-validator';
export class PresenceSensorDto {
@IsString()
@IsNotEmpty()
public uuid: string;
@IsString()
@IsNotEmpty()
public deviceUuid: string;
@IsString()
@IsNotEmpty()
public eventDate: string;
@IsNumber()
@IsNotEmpty()
public CountMotionDetected: number;
@IsNumber()
@IsNotEmpty()
public CountPresenceDetected: number;
@IsNumber()
@IsNotEmpty()
public CountTotalPresenceDetected: number;
}

View File

@ -0,0 +1 @@
export * from './presence-sensor.entity';

View File

@ -0,0 +1,58 @@
import { Column, Entity, ManyToOne, Unique } from 'typeorm';
import { AbstractEntity } from '../../abstract/entities/abstract.entity';
import { PresenceSensorDto } from '../dtos';
import { DeviceEntity } from '../../device/entities/device.entity';
import { SpaceEntity } from '../../space/entities/space.entity';
@Entity({ name: 'presence-sensor-daily-device-detection' })
@Unique(['deviceUuid', 'eventDate'])
export class PresenceSensorDailyDeviceEntity extends AbstractEntity<PresenceSensorDto> {
@Column({ nullable: false })
public deviceUuid: string;
@Column({ nullable: false, type: 'date' })
public eventDate: string;
@Column({ nullable: false })
public CountMotionDetected: number;
@Column({ nullable: false })
public CountPresenceDetected: number;
@Column({ nullable: false })
public CountTotalPresenceDetected: number;
@ManyToOne(() => DeviceEntity, (device) => device.presenceSensorDaily)
device: DeviceEntity;
constructor(partial: Partial<PresenceSensorDailyDeviceEntity>) {
super();
Object.assign(this, partial);
}
}
@Entity({ name: 'presence-sensor-daily-space-detection' })
@Unique(['spaceUuid', 'eventDate'])
export class PresenceSensorDailySpaceEntity extends AbstractEntity<PresenceSensorDto> {
@Column({ nullable: false })
public spaceUuid: string;
@Column({ nullable: false, type: 'date' })
public eventDate: string;
@Column({ nullable: false })
public CountMotionDetected: number;
@Column({ nullable: false })
public CountPresenceDetected: number;
@Column({ nullable: false })
public CountTotalPresenceDetected: number;
@ManyToOne(() => SpaceEntity, (space) => space.presenceSensorDaily)
space: SpaceEntity;
constructor(partial: Partial<PresenceSensorDailySpaceEntity>) {
super();
Object.assign(this, partial);
}
}

View File

@ -0,0 +1,19 @@
import { Module } from '@nestjs/common';
import { TypeOrmModule } from '@nestjs/typeorm';
import {
PresenceSensorDailyDeviceEntity,
PresenceSensorDailySpaceEntity,
} from './entities/presence-sensor.entity';
@Module({
providers: [],
exports: [],
controllers: [],
imports: [
TypeOrmModule.forFeature([
PresenceSensorDailyDeviceEntity,
PresenceSensorDailySpaceEntity,
]),
],
})
export class PresenceSensorRepositoryModule {}

View File

@ -0,0 +1 @@
export * from './presence-sensor.repository';

View File

@ -0,0 +1,19 @@
import { DataSource, Repository } from 'typeorm';
import { Injectable } from '@nestjs/common';
import {
PresenceSensorDailyDeviceEntity,
PresenceSensorDailySpaceEntity,
} from '../entities';
@Injectable()
export class PresenceSensorDailyDeviceRepository extends Repository<PresenceSensorDailyDeviceEntity> {
constructor(private dataSource: DataSource) {
super(PresenceSensorDailyDeviceEntity, dataSource.createEntityManager());
}
}
@Injectable()
export class PresenceSensorDailySpaceRepository extends Repository<PresenceSensorDailySpaceEntity> {
constructor(private dataSource: DataSource) {
super(PresenceSensorDailySpaceEntity, dataSource.createEntityManager());
}
}

View File

@ -1,10 +1,7 @@
import { Column, Entity, OneToMany } from 'typeorm';
import { ProductDto } from '../dtos';
import { AbstractEntity } from '../../abstract/entities/abstract.entity';
import { DeviceEntity } from '../../device/entities';
import { TagModel } from '../../space-model';
import { TagEntity } from '../../space/entities/tag.entity';
import { NewTagEntity } from '../../tag/entities';
import { ProductDto } from '../dtos';
@Entity({ name: 'product' })
export class ProductEntity extends AbstractEntity<ProductDto> {
@Column({
@ -28,15 +25,6 @@ export class ProductEntity extends AbstractEntity<ProductDto> {
})
public prodType: string;
@OneToMany(() => NewTagEntity, (tag) => tag.product, { cascade: true })
public newTags: NewTagEntity[];
@OneToMany(() => TagModel, (tag) => tag.product)
tagModels: TagModel[];
@OneToMany(() => TagEntity, (tag) => tag.product)
tags: TagEntity[];
@OneToMany(
() => DeviceEntity,
(devicesProductEntity) => devicesProductEntity.productDevice,

View File

@ -12,6 +12,7 @@ export class RoleTypeEntity extends AbstractEntity<RoleTypeDto> {
nullable: false,
enum: Object.values(RoleType),
})
// why is this ts-type string not enum?
type: string;
@OneToMany(() => UserEntity, (inviteUser) => inviteUser.roleType, {
nullable: true,

View File

@ -1,21 +0,0 @@
import { IsNotEmpty, IsString } from 'class-validator';
export class TagModelDto {
@IsString()
@IsNotEmpty()
public uuid: string;
@IsString()
@IsNotEmpty()
public name: string;
@IsString()
@IsNotEmpty()
public productUuid: string;
@IsString()
spaceModelUuid: string;
@IsString()
subspaceModelUuid: string;
}

View File

@ -1,4 +1,3 @@
export * from './space-model-product-allocation.entity';
export * from './space-model.entity';
export * from './subspace-model';
export * from './tag-model.entity';
export * from './space-model-product-allocation.entity';

View File

@ -1,18 +1,12 @@
import {
Entity,
Column,
ManyToOne,
ManyToMany,
JoinTable,
OneToMany,
} from 'typeorm';
import { SpaceModelEntity } from './space-model.entity';
import { NewTagEntity } from '../../tag/entities/tag.entity';
import { Column, Entity, ManyToOne, OneToMany, Unique } from 'typeorm';
import { AbstractEntity } from '../../abstract/entities/abstract.entity';
import { ProductEntity } from '../../product/entities/product.entity';
import { SpaceProductAllocationEntity } from '../../space/entities/space-product-allocation.entity';
import { AbstractEntity } from '../../abstract/entities/abstract.entity';
import { NewTagEntity } from '../../tag/entities/tag.entity';
import { SpaceModelEntity } from './space-model.entity';
@Entity({ name: 'space_model_product_allocation' })
@Unique(['spaceModel', 'product', 'tag'])
export class SpaceModelProductAllocationEntity extends AbstractEntity<SpaceModelProductAllocationEntity> {
@Column({
type: 'uuid',
@ -31,9 +25,8 @@ export class SpaceModelProductAllocationEntity extends AbstractEntity<SpaceModel
@ManyToOne(() => ProductEntity, { nullable: false, onDelete: 'CASCADE' })
public product: ProductEntity;
@ManyToMany(() => NewTagEntity, { cascade: true, onDelete: 'CASCADE' })
@JoinTable({ name: 'space_model_product_tags' })
public tags: NewTagEntity[];
@ManyToOne(() => NewTagEntity, { nullable: true, onDelete: 'CASCADE' })
public tag: NewTagEntity;
@OneToMany(
() => SpaceProductAllocationEntity,

View File

@ -1,11 +1,10 @@
import { Entity, Column, OneToMany, ManyToOne, JoinColumn } from 'typeorm';
import { Column, Entity, JoinColumn, ManyToOne, OneToMany } from 'typeorm';
import { AbstractEntity } from '../../abstract/entities/abstract.entity';
import { SpaceModelDto } from '../dtos';
import { SubspaceModelEntity } from './subspace-model';
import { ProjectEntity } from '../../project/entities';
import { TagModel } from './tag-model.entity';
import { SpaceModelProductAllocationEntity } from './space-model-product-allocation.entity';
import { SpaceEntity } from '../../space/entities/space.entity';
import { SpaceModelDto } from '../dtos';
import { SpaceModelProductAllocationEntity } from './space-model-product-allocation.entity';
import { SubspaceModelEntity } from './subspace-model';
@Entity({ name: 'space-model' })
export class SpaceModelEntity extends AbstractEntity<SpaceModelDto> {
@ -49,9 +48,6 @@ export class SpaceModelEntity extends AbstractEntity<SpaceModelDto> {
})
public spaces: SpaceEntity[];
@OneToMany(() => TagModel, (tag) => tag.spaceModel)
tags: TagModel[];
@OneToMany(
() => SpaceModelProductAllocationEntity,
(allocation) => allocation.spaceModel,

View File

@ -1,11 +1,12 @@
import { Entity, Column, ManyToOne, ManyToMany, JoinTable } from 'typeorm';
import { SubspaceModelEntity } from './subspace-model.entity';
import { AbstractEntity } from '@app/common/modules/abstract/entities/abstract.entity';
import { ProductEntity } from '@app/common/modules/product/entities/product.entity';
import { NewTagEntity } from '@app/common/modules/tag/entities/tag.entity';
import { Column, Entity, ManyToOne, Unique } from 'typeorm';
import { SubspaceModelProductAllocationDto } from '../../dtos/subspace-model/subspace-model-product-allocation.dto';
import { AbstractEntity } from '@app/common/modules/abstract/entities/abstract.entity';
import { SubspaceModelEntity } from './subspace-model.entity';
@Entity({ name: 'subspace_model_product_allocation' })
@Unique(['subspaceModel', 'product', 'tag'])
export class SubspaceModelProductAllocationEntity extends AbstractEntity<SubspaceModelProductAllocationDto> {
@Column({
type: 'uuid',
@ -27,12 +28,8 @@ export class SubspaceModelProductAllocationEntity extends AbstractEntity<Subspac
@ManyToOne(() => ProductEntity, { nullable: false, onDelete: 'CASCADE' })
public product: ProductEntity;
@ManyToMany(() => NewTagEntity, (tag) => tag.subspaceModelAllocations, {
cascade: true,
onDelete: 'CASCADE',
})
@JoinTable({ name: 'subspace_model_product_tags' })
public tags: NewTagEntity[];
@ManyToOne(() => NewTagEntity, { nullable: true, onDelete: 'CASCADE' })
public tag: NewTagEntity;
constructor(partial: Partial<SubspaceModelProductAllocationEntity>) {
super();

View File

@ -1,10 +1,9 @@
import { AbstractEntity } from '@app/common/modules/abstract/entities/abstract.entity';
import { SubspaceEntity } from '@app/common/modules/space/entities/subspace/subspace.entity';
import { Column, Entity, ManyToOne, OneToMany } from 'typeorm';
import { SubSpaceModelDto } from '../../dtos';
import { SpaceModelEntity } from '../space-model.entity';
import { TagModel } from '../tag-model.entity';
import { SubspaceModelProductAllocationEntity } from './subspace-model-product-allocation.entity';
import { SubspaceEntity } from '@app/common/modules/space/entities/subspace/subspace.entity';
@Entity({ name: 'subspace-model' })
export class SubspaceModelEntity extends AbstractEntity<SubSpaceModelDto> {
@ -41,9 +40,6 @@ export class SubspaceModelEntity extends AbstractEntity<SubSpaceModelDto> {
})
public disabled: boolean;
@OneToMany(() => TagModel, (tag) => tag.subspaceModel)
tags: TagModel[];
@OneToMany(
() => SubspaceModelProductAllocationEntity,
(allocation) => allocation.subspaceModel,

View File

@ -1,38 +0,0 @@
import { Column, Entity, JoinColumn, ManyToOne, OneToMany } from 'typeorm';
import { AbstractEntity } from '../../abstract/entities/abstract.entity';
import { TagModelDto } from '../dtos/tag-model.dto';
import { SpaceModelEntity } from './space-model.entity';
import { SubspaceModelEntity } from './subspace-model';
import { ProductEntity } from '../../product/entities';
import { TagEntity } from '../../space/entities/tag.entity';
@Entity({ name: 'tag_model' })
export class TagModel extends AbstractEntity<TagModelDto> {
@Column({ type: 'varchar', length: 255 })
tag: string;
@ManyToOne(() => ProductEntity, (product) => product.tagModels, {
nullable: false,
})
@JoinColumn({ name: 'product_id' })
product: ProductEntity;
@ManyToOne(() => SpaceModelEntity, (space) => space.tags, { nullable: true })
@JoinColumn({ name: 'space_model_id' })
spaceModel: SpaceModelEntity;
@ManyToOne(() => SubspaceModelEntity, (subspace) => subspace.tags, {
nullable: true,
})
@JoinColumn({ name: 'subspace_model_id' })
subspaceModel: SubspaceModelEntity;
@Column({
nullable: false,
default: false,
})
public disabled: boolean;
@OneToMany(() => TagEntity, (tag) => tag.model)
tags: TagEntity[];
}

View File

@ -1,11 +1,10 @@
import { DataSource, Repository } from 'typeorm';
import { Injectable } from '@nestjs/common';
import { DataSource, Repository } from 'typeorm';
import {
SpaceModelEntity,
SpaceModelProductAllocationEntity,
SubspaceModelEntity,
SubspaceModelProductAllocationEntity,
TagModel,
} from '../entities';
@Injectable()
@ -21,13 +20,6 @@ export class SubspaceModelRepository extends Repository<SubspaceModelEntity> {
}
}
@Injectable()
export class TagModelRepository extends Repository<TagModel> {
constructor(private dataSource: DataSource) {
super(TagModel, dataSource.createEntityManager());
}
}
@Injectable()
export class SpaceModelProductAllocationRepoitory extends Repository<SpaceModelProductAllocationEntity> {
constructor(private dataSource: DataSource) {

View File

@ -1,13 +1,11 @@
import { TypeOrmModule } from '@nestjs/typeorm';
import { SpaceModelEntity, SubspaceModelEntity, TagModel } from './entities';
import { Module } from '@nestjs/common';
import { TypeOrmModule } from '@nestjs/typeorm';
import { SpaceModelEntity, SubspaceModelEntity } from './entities';
@Module({
providers: [],
exports: [],
controllers: [],
imports: [
TypeOrmModule.forFeature([SpaceModelEntity, SubspaceModelEntity, TagModel]),
],
imports: [TypeOrmModule.forFeature([SpaceModelEntity, SubspaceModelEntity])],
})
export class SpaceModelRepositoryModule {}

View File

@ -1,32 +0,0 @@
import { Column, Entity, JoinColumn, ManyToOne } from 'typeorm';
import { AbstractEntity } from '../../abstract/entities/abstract.entity';
import { SpaceEntity } from './space.entity';
import { Direction } from '@app/common/constants/direction.enum';
@Entity({ name: 'space-link' })
export class SpaceLinkEntity extends AbstractEntity {
@ManyToOne(() => SpaceEntity, { nullable: false, onDelete: 'CASCADE' })
@JoinColumn({ name: 'start_space_id' })
public startSpace: SpaceEntity;
@ManyToOne(() => SpaceEntity, { nullable: false, onDelete: 'CASCADE' })
@JoinColumn({ name: 'end_space_id' })
public endSpace: SpaceEntity;
@Column({
nullable: false,
default: false,
})
public disabled: boolean;
@Column({
nullable: false,
enum: Object.values(Direction),
})
direction: string;
constructor(partial: Partial<SpaceLinkEntity>) {
super();
Object.assign(this, partial);
}
}

View File

@ -1,12 +1,13 @@
import { Entity, Column, ManyToOne, ManyToMany, JoinTable } from 'typeorm';
import { SpaceEntity } from './space.entity';
import { SpaceModelProductAllocationEntity } from '../../space-model/entities/space-model-product-allocation.entity';
import { ProductEntity } from '../../product/entities/product.entity';
import { NewTagEntity } from '../../tag/entities/tag.entity';
import { Column, Entity, ManyToOne, Unique } from 'typeorm';
import { AbstractEntity } from '../../abstract/entities/abstract.entity';
import { ProductEntity } from '../../product/entities/product.entity';
import { SpaceModelProductAllocationEntity } from '../../space-model/entities/space-model-product-allocation.entity';
import { NewTagEntity } from '../../tag/entities/tag.entity';
import { SpaceProductAllocationDto } from '../dtos/space-product-allocation.dto';
import { SpaceEntity } from './space.entity';
@Entity({ name: 'space_product_allocation' })
@Unique(['space', 'product', 'tag'], {})
export class SpaceProductAllocationEntity extends AbstractEntity<SpaceProductAllocationDto> {
@Column({
type: 'uuid',
@ -30,9 +31,8 @@ export class SpaceProductAllocationEntity extends AbstractEntity<SpaceProductAll
@ManyToOne(() => ProductEntity, { nullable: false, onDelete: 'CASCADE' })
public product: ProductEntity;
@ManyToMany(() => NewTagEntity)
@JoinTable({ name: 'space_product_tags' })
public tags: NewTagEntity[];
@ManyToOne(() => NewTagEntity, { nullable: true, onDelete: 'CASCADE' })
public tag: NewTagEntity;
constructor(partial: Partial<SpaceProductAllocationEntity>) {
super();

View File

@ -1,13 +1,23 @@
import { Column, Entity, JoinColumn, ManyToOne, OneToMany } from 'typeorm';
import { SpaceDto } from '../dtos';
import {
Column,
Entity,
JoinColumn,
ManyToOne,
OneToMany,
OneToOne,
} from 'typeorm';
import { AbstractEntity } from '../../abstract/entities/abstract.entity';
import { UserSpaceEntity } from '../../user/entities';
import { DeviceEntity } from '../../device/entities';
import { AqiSpaceDailyPollutantStatsEntity } from '../../aqi/entities';
import { BookableSpaceEntity } from '../../booking/entities';
import { CommunityEntity } from '../../community/entities';
import { SpaceLinkEntity } from './space-link.entity';
import { DeviceEntity } from '../../device/entities';
import { InviteUserSpaceEntity } from '../../Invite-user/entities';
import { SpaceDailyOccupancyDurationEntity } from '../../occupancy/entities';
import { PresenceSensorDailySpaceEntity } from '../../presence-sensor/entities';
import { SceneEntity } from '../../scene/entities';
import { SpaceModelEntity } from '../../space-model';
import { InviteUserSpaceEntity } from '../../Invite-user/entities';
import { UserSpaceEntity } from '../../user/entities';
import { SpaceDto } from '../dtos';
import { SpaceProductAllocationEntity } from './space-product-allocation.entity';
import { SubspaceEntity } from './subspace/subspace.entity';
@ -54,6 +64,12 @@ export class SpaceEntity extends AbstractEntity<SpaceDto> {
})
public disabled: boolean;
@Column({
nullable: true,
type: Number,
})
public order?: number;
@OneToMany(() => SubspaceEntity, (subspace) => subspace.space, {
nullable: true,
})
@ -72,16 +88,6 @@ export class SpaceEntity extends AbstractEntity<SpaceDto> {
)
devices: DeviceEntity[];
@OneToMany(() => SpaceLinkEntity, (connection) => connection.startSpace, {
nullable: true,
})
public outgoingConnections: SpaceLinkEntity[];
@OneToMany(() => SpaceLinkEntity, (connection) => connection.endSpace, {
nullable: true,
})
public incomingConnections: SpaceLinkEntity[];
@Column({
nullable: true,
type: 'text',
@ -111,6 +117,21 @@ export class SpaceEntity extends AbstractEntity<SpaceDto> {
)
public productAllocations: SpaceProductAllocationEntity[];
@OneToMany(() => PresenceSensorDailySpaceEntity, (sensor) => sensor.space)
presenceSensorDaily: PresenceSensorDailySpaceEntity[];
@OneToMany(() => AqiSpaceDailyPollutantStatsEntity, (aqi) => aqi.space)
aqiSensorDaily: AqiSpaceDailyPollutantStatsEntity[];
@OneToMany(
() => SpaceDailyOccupancyDurationEntity,
(occupancy) => occupancy.space,
)
occupancyDaily: SpaceDailyOccupancyDurationEntity[];
@OneToOne(() => BookableSpaceEntity, (bookable) => bookable.space)
bookableConfig: BookableSpaceEntity;
constructor(partial: Partial<SpaceEntity>) {
super();
Object.assign(this, partial);

View File

@ -1,20 +1,13 @@
import {
Entity,
Column,
ManyToOne,
ManyToMany,
JoinTable,
Unique,
} from 'typeorm';
import { SubspaceEntity } from './subspace.entity';
import { AbstractEntity } from '@app/common/modules/abstract/entities/abstract.entity';
import { ProductEntity } from '@app/common/modules/product/entities';
import { SubspaceModelProductAllocationEntity } from '@app/common/modules/space-model';
import { NewTagEntity } from '@app/common/modules/tag/entities/tag.entity';
import { AbstractEntity } from '@app/common/modules/abstract/entities/abstract.entity';
import { Column, Entity, ManyToOne, Unique } from 'typeorm';
import { SubspaceProductAllocationDto } from '../../dtos/subspace-product-allocation.dto';
import { SubspaceEntity } from './subspace.entity';
@Entity({ name: 'subspace_product_allocation' })
@Unique(['subspace', 'product'])
@Unique(['subspace', 'product', 'tag'])
export class SubspaceProductAllocationEntity extends AbstractEntity<SubspaceProductAllocationDto> {
@Column({
type: 'uuid',
@ -38,9 +31,8 @@ export class SubspaceProductAllocationEntity extends AbstractEntity<SubspaceProd
@ManyToOne(() => ProductEntity, { nullable: false, onDelete: 'CASCADE' })
public product: ProductEntity;
@ManyToMany(() => NewTagEntity)
@JoinTable({ name: 'subspace_product_tags' })
public tags: NewTagEntity[];
@ManyToOne(() => NewTagEntity, { nullable: true, onDelete: 'CASCADE' })
public tag: NewTagEntity;
constructor(partial: Partial<SubspaceProductAllocationEntity>) {
super();

View File

@ -4,7 +4,6 @@ import { SubspaceModelEntity } from '@app/common/modules/space-model';
import { Column, Entity, JoinColumn, ManyToOne, OneToMany } from 'typeorm';
import { SubspaceDto } from '../../dtos';
import { SpaceEntity } from '../space.entity';
import { TagEntity } from '../tag.entity';
import { SubspaceProductAllocationEntity } from './subspace-product-allocation.entity';
@Entity({ name: 'subspace' })
@ -43,9 +42,6 @@ export class SubspaceEntity extends AbstractEntity<SubspaceDto> {
})
subSpaceModel?: SubspaceModelEntity;
@OneToMany(() => TagEntity, (tag) => tag.subspace)
tags: TagEntity[];
@OneToMany(
() => SubspaceProductAllocationEntity,
(allocation) => allocation.subspace,

View File

@ -1,41 +0,0 @@
import { Entity, Column, ManyToOne, JoinColumn, OneToOne } from 'typeorm';
import { AbstractEntity } from '../../abstract/entities/abstract.entity';
import { ProductEntity } from '../../product/entities';
import { TagDto } from '../dtos';
import { TagModel } from '../../space-model/entities/tag-model.entity';
import { DeviceEntity } from '../../device/entities';
import { SubspaceEntity } from './subspace/subspace.entity';
@Entity({ name: 'tag' })
export class TagEntity extends AbstractEntity<TagDto> {
@Column({ type: 'varchar', length: 255, nullable: true })
tag: string;
@ManyToOne(() => TagModel, (model) => model.tags, {
nullable: true,
})
model: TagModel;
@ManyToOne(() => ProductEntity, (product) => product.tags, {
nullable: false,
})
product: ProductEntity;
@ManyToOne(() => SubspaceEntity, (subspace) => subspace.tags, {
nullable: true,
})
@JoinColumn({ name: 'subspace_id' })
subspace: SubspaceEntity;
@Column({
nullable: false,
default: false,
})
public disabled: boolean;
@OneToOne(() => DeviceEntity, (device) => device.tag, {
nullable: true,
})
@JoinColumn({ name: 'device_id' })
device: DeviceEntity;
}

View File

@ -1,10 +1,8 @@
import { DataSource, Repository } from 'typeorm';
import { Injectable } from '@nestjs/common';
import { DataSource, Repository } from 'typeorm';
import { InviteSpaceEntity } from '../entities/invite-space.entity';
import { SpaceLinkEntity } from '../entities/space-link.entity';
import { SpaceEntity } from '../entities/space.entity';
import { TagEntity } from '../entities/tag.entity';
import { SpaceProductAllocationEntity } from '../entities/space-product-allocation.entity';
import { SpaceEntity } from '../entities/space.entity';
@Injectable()
export class SpaceRepository extends Repository<SpaceEntity> {
@ -13,20 +11,6 @@ export class SpaceRepository extends Repository<SpaceEntity> {
}
}
@Injectable()
export class SpaceLinkRepository extends Repository<SpaceLinkEntity> {
constructor(private dataSource: DataSource) {
super(SpaceLinkEntity, dataSource.createEntityManager());
}
}
@Injectable()
export class TagRepository extends Repository<TagEntity> {
constructor(private dataSource: DataSource) {
super(TagEntity, dataSource.createEntityManager());
}
}
@Injectable()
export class InviteSpaceRepository extends Repository<InviteSpaceEntity> {
constructor(private dataSource: DataSource) {

View File

@ -6,7 +6,6 @@ import { SpaceProductAllocationEntity } from './entities/space-product-allocatio
import { SpaceEntity } from './entities/space.entity';
import { SubspaceProductAllocationEntity } from './entities/subspace/subspace-product-allocation.entity';
import { SubspaceEntity } from './entities/subspace/subspace.entity';
import { TagEntity } from './entities/tag.entity';
@Module({
providers: [],
@ -16,7 +15,6 @@ import { TagEntity } from './entities/tag.entity';
TypeOrmModule.forFeature([
SpaceEntity,
SubspaceEntity,
TagEntity,
InviteSpaceEntity,
SpaceProductAllocationEntity,
SubspaceProductAllocationEntity,

View File

@ -1,11 +1,10 @@
import { Entity, Column, ManyToOne, Unique, ManyToMany } from 'typeorm';
import { ProductEntity } from '../../product/entities';
import { ProjectEntity } from '../../project/entities';
import { Column, Entity, ManyToOne, OneToMany, Unique } from 'typeorm';
import { AbstractEntity } from '../../abstract/entities/abstract.entity';
import { NewTagDto } from '../dtos/tag.dto';
import { DeviceEntity } from '../../device/entities/device.entity';
import { ProjectEntity } from '../../project/entities';
import { SpaceModelProductAllocationEntity } from '../../space-model/entities/space-model-product-allocation.entity';
import { SubspaceModelProductAllocationEntity } from '../../space-model/entities/subspace-model/subspace-model-product-allocation.entity';
import { DeviceEntity } from '../../device/entities/device.entity';
import { NewTagDto } from '../dtos/tag.dto';
@Entity({ name: 'new_tag' })
@Unique(['name', 'project'])
@ -24,31 +23,25 @@ export class NewTagEntity extends AbstractEntity<NewTagDto> {
})
name: string;
@ManyToOne(() => ProductEntity, (product) => product.newTags, {
nullable: false,
onDelete: 'CASCADE',
})
public product: ProductEntity;
@ManyToOne(() => ProjectEntity, (project) => project.tags, {
nullable: false,
onDelete: 'CASCADE',
})
public project: ProjectEntity;
@ManyToMany(
@OneToMany(
() => SpaceModelProductAllocationEntity,
(allocation) => allocation.tags,
(allocation) => allocation.tag,
)
public spaceModelAllocations: SpaceModelProductAllocationEntity[];
@ManyToMany(
@OneToMany(
() => SubspaceModelProductAllocationEntity,
(allocation) => allocation.tags,
(allocation) => allocation.tag,
)
public subspaceModelAllocations: SubspaceModelProductAllocationEntity[];
@ManyToOne(() => DeviceEntity, (device) => device.tag)
@OneToMany(() => DeviceEntity, (device) => device.tag)
public devices: DeviceEntity[];
constructor(partial: Partial<NewTagEntity>) {

View File

@ -1,3 +1,4 @@
import { defaultProfilePicture } from '@app/common/constants/default.profile.picture';
import {
Column,
DeleteDateColumn,
@ -8,27 +9,26 @@ import {
OneToOne,
Unique,
} from 'typeorm';
import { OtpType } from '../../../../src/constants/otp-type.enum';
import { AbstractEntity } from '../../abstract/entities/abstract.entity';
import { ClientEntity } from '../../client/entities';
import {
DeviceNotificationEntity,
DeviceUserPermissionEntity,
} from '../../device/entities';
import { InviteUserEntity } from '../../Invite-user/entities';
import { ProjectEntity } from '../../project/entities';
import { RegionEntity } from '../../region/entities';
import { RoleTypeEntity } from '../../role-type/entities';
import { SpaceEntity } from '../../space/entities/space.entity';
import { TimeZoneEntity } from '../../timezone/entities';
import { VisitorPasswordEntity } from '../../visitor-password/entities';
import {
UserDto,
UserNotificationDto,
UserOtpDto,
UserSpaceDto,
} from '../dtos';
import { AbstractEntity } from '../../abstract/entities/abstract.entity';
import {
DeviceNotificationEntity,
DeviceUserPermissionEntity,
} from '../../device/entities';
import { defaultProfilePicture } from '@app/common/constants/default.profile.picture';
import { RegionEntity } from '../../region/entities';
import { TimeZoneEntity } from '../../timezone/entities';
import { OtpType } from '../../../../src/constants/otp-type.enum';
import { RoleTypeEntity } from '../../role-type/entities';
import { VisitorPasswordEntity } from '../../visitor-password/entities';
import { InviteUserEntity } from '../../Invite-user/entities';
import { ProjectEntity } from '../../project/entities';
import { SpaceEntity } from '../../space/entities/space.entity';
import { ClientEntity } from '../../client/entities';
@Entity({ name: 'user' })
export class UserEntity extends AbstractEntity<UserDto> {
@ -82,6 +82,12 @@ export class UserEntity extends AbstractEntity<UserDto> {
})
public isActive: boolean;
@Column({
nullable: true,
type: Number,
})
public bookingPoints?: number;
@Column({ default: false })
hasAcceptedWebAgreement: boolean;
@ -94,7 +100,9 @@ export class UserEntity extends AbstractEntity<UserDto> {
@Column({ type: 'timestamp', nullable: true })
appAgreementAcceptedAt: Date;
@OneToMany(() => UserSpaceEntity, (userSpace) => userSpace.user)
@OneToMany(() => UserSpaceEntity, (userSpace) => userSpace.user, {
onDelete: 'CASCADE',
})
userSpaces: UserSpaceEntity[];
@OneToMany(
@ -158,6 +166,7 @@ export class UserEntity extends AbstractEntity<UserDto> {
export class UserNotificationEntity extends AbstractEntity<UserNotificationDto> {
@ManyToOne(() => UserEntity, (user) => user.roleType, {
nullable: false,
onDelete: 'CASCADE',
})
user: UserEntity;
@Column({
@ -219,7 +228,10 @@ export class UserSpaceEntity extends AbstractEntity<UserSpaceDto> {
})
public uuid: string;
@ManyToOne(() => UserEntity, (user) => user.userSpaces, { nullable: false })
@ManyToOne(() => UserEntity, (user) => user.userSpaces, {
nullable: false,
onDelete: 'CASCADE',
})
user: UserEntity;
@ManyToOne(() => SpaceEntity, (space) => space.userSpaces, {

View File

@ -1,7 +1,7 @@
import { Column, Entity, ManyToOne, JoinColumn, Index } from 'typeorm';
import { VisitorPasswordDto } from '../dtos';
import { Column, Entity, Index, JoinColumn, ManyToOne } from 'typeorm';
import { AbstractEntity } from '../../abstract/entities/abstract.entity';
import { UserEntity } from '../../user/entities/user.entity';
import { VisitorPasswordDto } from '../dtos';
@Entity({ name: 'visitor-password' })
@Index('IDX_PASSWORD_TUYA_UUID', ['passwordTuyaUuid'])
@ -14,6 +14,7 @@ export class VisitorPasswordEntity extends AbstractEntity<VisitorPasswordDto> {
@ManyToOne(() => UserEntity, (user) => user.visitorPasswords, {
nullable: false,
onDelete: 'CASCADE',
})
@JoinColumn({ name: 'authorizer_uuid' })
public user: UserEntity;

View File

@ -0,0 +1,39 @@
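-- Fetch daily pollutant statistics from "space-daily-pollutant-stats".
-- $1: space UUID (NULL returns all spaces), $2: month as 'YYYY-MM' (empty string skips the month filter).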
WITH params AS (
SELECT
$1::uuid AS space_uuid,
TO_DATE(NULLIF($2, ''), 'YYYY-MM') AS event_month
)
SELECT
sdp.space_uuid,
sdp.event_date,
sdp.good_aqi_percentage, sdp.moderate_aqi_percentage, sdp.unhealthy_sensitive_aqi_percentage, sdp.unhealthy_aqi_percentage,
sdp.very_unhealthy_aqi_percentage, sdp.hazardous_aqi_percentage,
sdp.daily_avg_aqi, sdp.daily_max_aqi, sdp.daily_min_aqi,
sdp.good_pm25_percentage, sdp.moderate_pm25_percentage, sdp.unhealthy_sensitive_pm25_percentage, sdp.unhealthy_pm25_percentage,
sdp.very_unhealthy_pm25_percentage, sdp.hazardous_pm25_percentage,
sdp.daily_avg_pm25, sdp.daily_max_pm25, sdp.daily_min_pm25,
sdp.good_pm10_percentage, sdp.moderate_pm10_percentage, sdp.unhealthy_sensitive_pm10_percentage, sdp.unhealthy_pm10_percentage,
sdp.very_unhealthy_pm10_percentage, sdp.hazardous_pm10_percentage,
sdp.daily_avg_pm10, sdp.daily_max_pm10, sdp.daily_min_pm10,
sdp.good_voc_percentage, sdp.moderate_voc_percentage, sdp.unhealthy_sensitive_voc_percentage, sdp.unhealthy_voc_percentage,
sdp.very_unhealthy_voc_percentage, sdp.hazardous_voc_percentage,
sdp.daily_avg_voc, sdp.daily_max_voc, sdp.daily_min_voc,
sdp.good_co2_percentage, sdp.moderate_co2_percentage, sdp.unhealthy_sensitive_co2_percentage, sdp.unhealthy_co2_percentage,
sdp.very_unhealthy_co2_percentage, sdp.hazardous_co2_percentage,
sdp.daily_avg_co2, sdp.daily_max_co2, sdp.daily_min_co2,
sdp.good_ch2o_percentage, sdp.moderate_ch2o_percentage, sdp.unhealthy_sensitive_ch2o_percentage, sdp.unhealthy_ch2o_percentage,
sdp.very_unhealthy_ch2o_percentage, sdp.hazardous_ch2o_percentage,
sdp.daily_avg_ch2o, sdp.daily_max_ch2o, sdp.daily_min_ch2o
FROM public."space-daily-pollutant-stats" AS sdp
CROSS JOIN params p
WHERE
(p.space_uuid IS NULL OR sdp.space_uuid = p.space_uuid)
AND (p.event_month IS NULL OR TO_CHAR(sdp.event_date, 'YYYY-MM') = TO_CHAR(p.event_month, 'YYYY-MM'))
ORDER BY sdp.space_uuid, sdp.event_date;

View File

@ -0,0 +1,376 @@
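-- Compute and upsert daily pollutant statistics (AQI, PM2.5, PM10, VOC, CO2, CH2O) per space
-- for the single day given as $1 ('YYYY-MM-DD') into "space-daily-pollutant-stats".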
WITH params AS (
SELECT
TO_DATE(NULLIF($1, ''), 'YYYY-MM-DD') AS event_date
),
-- Query Pipeline Starts Here
device_space AS (
SELECT
device.uuid AS device_id,
device.space_device_uuid AS space_id,
"device-status-log".event_time::timestamp AS event_time,
"device-status-log".code,
"device-status-log".value
FROM device
LEFT JOIN "device-status-log"
ON device.uuid = "device-status-log".device_id
LEFT JOIN product
ON product.uuid = device.product_device_uuid
WHERE product.cat_name = 'hjjcy'
),
average_pollutants AS (
SELECT
event_time::date AS event_date,
date_trunc('hour', event_time) AS event_hour,
space_id,
-- PM1
MIN(CASE WHEN code = 'pm1' THEN value::numeric END) AS pm1_min,
AVG(CASE WHEN code = 'pm1' THEN value::numeric END) AS pm1_avg,
MAX(CASE WHEN code = 'pm1' THEN value::numeric END) AS pm1_max,
-- PM25
MIN(CASE WHEN code = 'pm25_value' THEN value::numeric END) AS pm25_min,
AVG(CASE WHEN code = 'pm25_value' THEN value::numeric END) AS pm25_avg,
MAX(CASE WHEN code = 'pm25_value' THEN value::numeric END) AS pm25_max,
-- PM10
MIN(CASE WHEN code = 'pm10' THEN value::numeric END) AS pm10_min,
AVG(CASE WHEN code = 'pm10' THEN value::numeric END) AS pm10_avg,
MAX(CASE WHEN code = 'pm10' THEN value::numeric END) AS pm10_max,
-- VOC
MIN(CASE WHEN code = 'voc_value' THEN value::numeric END) AS voc_min,
AVG(CASE WHEN code = 'voc_value' THEN value::numeric END) AS voc_avg,
MAX(CASE WHEN code = 'voc_value' THEN value::numeric END) AS voc_max,
-- CH2O
MIN(CASE WHEN code = 'ch2o_value' THEN value::numeric END) AS ch2o_min,
AVG(CASE WHEN code = 'ch2o_value' THEN value::numeric END) AS ch2o_avg,
MAX(CASE WHEN code = 'ch2o_value' THEN value::numeric END) AS ch2o_max,
-- CO2
MIN(CASE WHEN code = 'co2_value' THEN value::numeric END) AS co2_min,
AVG(CASE WHEN code = 'co2_value' THEN value::numeric END) AS co2_avg,
MAX(CASE WHEN code = 'co2_value' THEN value::numeric END) AS co2_max
FROM device_space
GROUP BY space_id, event_hour, event_date
),
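-- Fill gaps: carry the previous hour's reading forward when a pollutant has no value for an hour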
filled_pollutants AS (
SELECT
*,
-- AVG
COALESCE(pm25_avg, LAG(pm25_avg) OVER (PARTITION BY space_id ORDER BY event_hour)) AS pm25_avg_f,
COALESCE(pm10_avg, LAG(pm10_avg) OVER (PARTITION BY space_id ORDER BY event_hour)) AS pm10_avg_f,
COALESCE(voc_avg, LAG(voc_avg) OVER (PARTITION BY space_id ORDER BY event_hour)) AS voc_avg_f,
COALESCE(co2_avg, LAG(co2_avg) OVER (PARTITION BY space_id ORDER BY event_hour)) AS co2_avg_f,
COALESCE(ch2o_avg, LAG(ch2o_avg) OVER (PARTITION BY space_id ORDER BY event_hour)) AS ch2o_avg_f,
-- MIN
COALESCE(pm25_min, LAG(pm25_min) OVER (PARTITION BY space_id ORDER BY event_hour)) AS pm25_min_f,
COALESCE(pm10_min, LAG(pm10_min) OVER (PARTITION BY space_id ORDER BY event_hour)) AS pm10_min_f,
COALESCE(voc_min, LAG(voc_min) OVER (PARTITION BY space_id ORDER BY event_hour)) AS voc_min_f,
COALESCE(co2_min, LAG(co2_min) OVER (PARTITION BY space_id ORDER BY event_hour)) AS co2_min_f,
COALESCE(ch2o_min, LAG(ch2o_min) OVER (PARTITION BY space_id ORDER BY event_hour)) AS ch2o_min_f,
-- MAX
COALESCE(pm25_max, LAG(pm25_max) OVER (PARTITION BY space_id ORDER BY event_hour)) AS pm25_max_f,
COALESCE(pm10_max, LAG(pm10_max) OVER (PARTITION BY space_id ORDER BY event_hour)) AS pm10_max_f,
COALESCE(voc_max, LAG(voc_max) OVER (PARTITION BY space_id ORDER BY event_hour)) AS voc_max_f,
COALESCE(co2_max, LAG(co2_max) OVER (PARTITION BY space_id ORDER BY event_hour)) AS co2_max_f,
COALESCE(ch2o_max, LAG(ch2o_max) OVER (PARTITION BY space_id ORDER BY event_hour)) AS ch2o_max_f
FROM average_pollutants
),
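-- Hourly AQI is the worse (greater) of the PM2.5 and PM10 sub-indices; each pollutant is also classified on its own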
hourly_results AS (
SELECT
space_id,
event_date,
event_hour,
pm1_min, pm1_avg, pm1_max,
pm25_min_f, pm25_avg_f, pm25_max_f,
pm10_min_f, pm10_avg_f, pm10_max_f,
voc_min_f, voc_avg_f, voc_max_f,
co2_min_f, co2_avg_f, co2_max_f,
ch2o_min_f, ch2o_avg_f, ch2o_max_f,
GREATEST(
calculate_aqi('pm25', pm25_min_f),
calculate_aqi('pm10', pm10_min_f)
) AS hourly_min_aqi,
GREATEST(
calculate_aqi('pm25', pm25_avg_f),
calculate_aqi('pm10', pm10_avg_f)
) AS hourly_avg_aqi,
GREATEST(
calculate_aqi('pm25', pm25_max_f),
calculate_aqi('pm10', pm10_max_f)
) AS hourly_max_aqi,
classify_aqi(GREATEST(
calculate_aqi('pm25', pm25_avg_f),
calculate_aqi('pm10', pm10_avg_f)
)) AS aqi_category,
classify_aqi(calculate_aqi('pm25',pm25_avg_f)) as pm25_category,
classify_aqi(calculate_aqi('pm10',pm10_avg_f)) as pm10_category,
classify_aqi(calculate_aqi('voc',voc_avg_f)) as voc_category,
classify_aqi(calculate_aqi('co2',co2_avg_f)) as co2_category,
classify_aqi(calculate_aqi('ch2o',ch2o_avg_f)) as ch2o_category
FROM filled_pollutants
),
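-- Count how many hours of each day fall into each category, per pollutant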
daily_category_counts AS (
SELECT space_id, event_date, aqi_category AS category, 'aqi' AS pollutant, COUNT(*) AS category_count
FROM hourly_results
GROUP BY space_id, event_date, aqi_category
UNION ALL
SELECT space_id, event_date, pm25_category AS category, 'pm25' AS pollutant, COUNT(*) AS category_count
FROM hourly_results
GROUP BY space_id, event_date, pm25_category
UNION ALL
SELECT space_id, event_date, pm10_category AS category, 'pm10' AS pollutant, COUNT(*) AS category_count
FROM hourly_results
GROUP BY space_id, event_date, pm10_category
UNION ALL
SELECT space_id, event_date, voc_category AS category, 'voc' AS pollutant, COUNT(*) AS category_count
FROM hourly_results
GROUP BY space_id, event_date, voc_category
UNION ALL
SELECT space_id, event_date, co2_category AS category, 'co2' AS pollutant, COUNT(*) AS category_count
FROM hourly_results
GROUP BY space_id, event_date, co2_category
UNION ALL
SELECT space_id, event_date, ch2o_category AS category, 'ch2o' AS pollutant, COUNT(*) AS category_count
FROM hourly_results
GROUP BY space_id, event_date, ch2o_category
),
daily_totals AS (
SELECT
space_id,
event_date,
SUM(category_count) AS total_count
FROM daily_category_counts
WHERE pollutant = 'aqi'
GROUP BY space_id, event_date
),
-- Pivot Categories into Columns
daily_percentages AS (
SELECT
dt.space_id,
dt.event_date,
-- AQI CATEGORIES
ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Good' and dcc.pollutant = 'aqi' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS good_aqi_percentage,
ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Moderate' and dcc.pollutant = 'aqi' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS moderate_aqi_percentage,
ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Unhealthy for Sensitive Groups' and dcc.pollutant = 'aqi' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS unhealthy_sensitive_aqi_percentage,
ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Unhealthy' and dcc.pollutant = 'aqi' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS unhealthy_aqi_percentage,
ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Very Unhealthy' and dcc.pollutant = 'aqi' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS very_unhealthy_aqi_percentage,
ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Hazardous' and dcc.pollutant = 'aqi' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS hazardous_aqi_percentage,
-- PM25 CATEGORIES
ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Good' and dcc.pollutant = 'pm25' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS good_pm25_percentage,
ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Moderate' and dcc.pollutant = 'pm25' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS moderate_pm25_percentage,
ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Unhealthy for Sensitive Groups' and dcc.pollutant = 'pm25' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS unhealthy_sensitive_pm25_percentage,
ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Unhealthy' and dcc.pollutant = 'pm25' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS unhealthy_pm25_percentage,
ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Very Unhealthy' and dcc.pollutant = 'pm25' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS very_unhealthy_pm25_percentage,
ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Hazardous' and dcc.pollutant = 'pm25' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS hazardous_pm25_percentage,
-- PM10 CATEGORIES
ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Good' and dcc.pollutant = 'pm10' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS good_pm10_percentage,
ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Moderate' and dcc.pollutant = 'pm10' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS moderate_pm10_percentage,
ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Unhealthy for Sensitive Groups' and dcc.pollutant = 'pm10' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS unhealthy_sensitive_pm10_percentage,
ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Unhealthy' and dcc.pollutant = 'pm10' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS unhealthy_pm10_percentage,
ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Very Unhealthy' and dcc.pollutant = 'pm10' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS very_unhealthy_pm10_percentage,
ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Hazardous' and dcc.pollutant = 'pm10' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS hazardous_pm10_percentage,
-- VOC CATEGORIES
ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Good' and dcc.pollutant = 'voc' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS good_voc_percentage,
ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Moderate' and dcc.pollutant = 'voc' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS moderate_voc_percentage,
ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Unhealthy for Sensitive Groups' and dcc.pollutant = 'voc' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS unhealthy_sensitive_voc_percentage,
ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Unhealthy' and dcc.pollutant = 'voc' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS unhealthy_voc_percentage,
ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Very Unhealthy' and dcc.pollutant = 'voc' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS very_unhealthy_voc_percentage,
ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Hazardous' and dcc.pollutant = 'voc' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS hazardous_voc_percentage,
-- CO2 CATEGORIES
ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Good' and dcc.pollutant = 'co2' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS good_co2_percentage,
ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Moderate' and dcc.pollutant = 'co2' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS moderate_co2_percentage,
ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Unhealthy for Sensitive Groups' and dcc.pollutant = 'co2' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS unhealthy_sensitive_co2_percentage,
ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Unhealthy' and dcc.pollutant = 'co2' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS unhealthy_co2_percentage,
ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Very Unhealthy' and dcc.pollutant = 'co2' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS very_unhealthy_co2_percentage,
ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Hazardous' and dcc.pollutant = 'co2' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS hazardous_co2_percentage,
-- CH2O CATEGORIES
ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Good' and dcc.pollutant = 'ch2o' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS good_ch2o_percentage,
ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Moderate' and dcc.pollutant = 'ch2o' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS moderate_ch2o_percentage,
ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Unhealthy for Sensitive Groups' and dcc.pollutant = 'ch2o' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS unhealthy_sensitive_ch2o_percentage,
ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Unhealthy' and dcc.pollutant = 'ch2o' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS unhealthy_ch2o_percentage,
ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Very Unhealthy' and dcc.pollutant = 'ch2o' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS very_unhealthy_ch2o_percentage,
ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Hazardous' and dcc.pollutant = 'ch2o' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS hazardous_ch2o_percentage
FROM daily_totals dt
LEFT JOIN daily_category_counts dcc
ON dt.space_id = dcc.space_id AND dt.event_date = dcc.event_date
GROUP BY dt.space_id, dt.event_date, dt.total_count
),
daily_averages AS (
SELECT
space_id,
event_date,
-- AQI
ROUND(AVG(hourly_min_aqi)::numeric, 2) AS daily_min_aqi,
ROUND(AVG(hourly_avg_aqi)::numeric, 2) AS daily_avg_aqi,
ROUND(AVG(hourly_max_aqi)::numeric, 2) AS daily_max_aqi,
-- PM25
ROUND(AVG(pm25_min_f)::numeric, 2) AS daily_min_pm25,
ROUND(AVG(pm25_avg_f)::numeric, 2) AS daily_avg_pm25,
ROUND(AVG(pm25_max_f)::numeric, 2) AS daily_max_pm25,
-- PM10
ROUND(AVG(pm10_min_f)::numeric, 2) AS daily_min_pm10,
ROUND(AVG(pm10_avg_f)::numeric, 2) AS daily_avg_pm10,
ROUND(AVG(pm10_max_f)::numeric, 2) AS daily_max_pm10,
-- VOC
ROUND(AVG(voc_min_f)::numeric, 2) AS daily_min_voc,
ROUND(AVG(voc_avg_f)::numeric, 2) AS daily_avg_voc,
ROUND(AVG(voc_max_f)::numeric, 2) AS daily_max_voc,
-- CO2
ROUND(AVG(co2_min_f)::numeric, 2) AS daily_min_co2,
ROUND(AVG(co2_avg_f)::numeric, 2) AS daily_avg_co2,
ROUND(AVG(co2_max_f)::numeric, 2) AS daily_max_co2,
-- CH2O
ROUND(AVG(ch2o_min_f)::numeric, 2) AS daily_min_ch2o,
ROUND(AVG(ch2o_avg_f)::numeric, 2) AS daily_avg_ch2o,
ROUND(AVG(ch2o_max_f)::numeric, 2) AS daily_max_ch2o
FROM hourly_results
GROUP BY space_id, event_date
),
final_data as(
SELECT
p.space_id,
p.event_date,
p.good_aqi_percentage, p.moderate_aqi_percentage, p.unhealthy_sensitive_aqi_percentage, p.unhealthy_aqi_percentage, p.very_unhealthy_aqi_percentage, p.hazardous_aqi_percentage,
a.daily_avg_aqi,a.daily_max_aqi, a.daily_min_aqi,
p.good_pm25_percentage, p.moderate_pm25_percentage, p.unhealthy_sensitive_pm25_percentage, p.unhealthy_pm25_percentage, p.very_unhealthy_pm25_percentage, p.hazardous_pm25_percentage,
a.daily_avg_pm25,a.daily_max_pm25, a.daily_min_pm25,
p.good_pm10_percentage, p.moderate_pm10_percentage, p.unhealthy_sensitive_pm10_percentage, p.unhealthy_pm10_percentage, p.very_unhealthy_pm10_percentage, p.hazardous_pm10_percentage,
a.daily_avg_pm10, a.daily_max_pm10, a.daily_min_pm10,
p.good_voc_percentage, p.moderate_voc_percentage, p.unhealthy_sensitive_voc_percentage, p.unhealthy_voc_percentage, p.very_unhealthy_voc_percentage, p.hazardous_voc_percentage,
a.daily_avg_voc, a.daily_max_voc, a.daily_min_voc,
p.good_co2_percentage, p.moderate_co2_percentage, p.unhealthy_sensitive_co2_percentage, p.unhealthy_co2_percentage, p.very_unhealthy_co2_percentage, p.hazardous_co2_percentage,
a.daily_avg_co2,a.daily_max_co2, a.daily_min_co2,
p.good_ch2o_percentage, p.moderate_ch2o_percentage, p.unhealthy_sensitive_ch2o_percentage, p.unhealthy_ch2o_percentage, p.very_unhealthy_ch2o_percentage, p.hazardous_ch2o_percentage,
a.daily_avg_ch2o,a.daily_max_ch2o, a.daily_min_ch2o
FROM daily_percentages p
LEFT JOIN daily_averages a
ON p.space_id = a.space_id
AND p.event_date = a.event_date
JOIN params
ON params.event_date = a.event_date
ORDER BY p.space_id, p.event_date)
INSERT INTO public."space-daily-pollutant-stats" (
space_uuid,
event_date,
good_aqi_percentage, moderate_aqi_percentage, unhealthy_sensitive_aqi_percentage, unhealthy_aqi_percentage, very_unhealthy_aqi_percentage, hazardous_aqi_percentage,
daily_avg_aqi, daily_max_aqi, daily_min_aqi,
good_pm25_percentage, moderate_pm25_percentage, unhealthy_sensitive_pm25_percentage, unhealthy_pm25_percentage, very_unhealthy_pm25_percentage, hazardous_pm25_percentage,
daily_avg_pm25, daily_max_pm25, daily_min_pm25,
good_pm10_percentage, moderate_pm10_percentage, unhealthy_sensitive_pm10_percentage, unhealthy_pm10_percentage, very_unhealthy_pm10_percentage, hazardous_pm10_percentage,
daily_avg_pm10, daily_max_pm10, daily_min_pm10,
good_voc_percentage, moderate_voc_percentage, unhealthy_sensitive_voc_percentage, unhealthy_voc_percentage, very_unhealthy_voc_percentage, hazardous_voc_percentage,
daily_avg_voc, daily_max_voc, daily_min_voc,
good_co2_percentage, moderate_co2_percentage, unhealthy_sensitive_co2_percentage, unhealthy_co2_percentage, very_unhealthy_co2_percentage, hazardous_co2_percentage,
daily_avg_co2, daily_max_co2, daily_min_co2,
good_ch2o_percentage, moderate_ch2o_percentage, unhealthy_sensitive_ch2o_percentage, unhealthy_ch2o_percentage, very_unhealthy_ch2o_percentage, hazardous_ch2o_percentage,
daily_avg_ch2o, daily_max_ch2o, daily_min_ch2o
)
SELECT
space_id,
event_date,
good_aqi_percentage, moderate_aqi_percentage, unhealthy_sensitive_aqi_percentage, unhealthy_aqi_percentage, very_unhealthy_aqi_percentage, hazardous_aqi_percentage,
daily_avg_aqi, daily_max_aqi, daily_min_aqi,
good_pm25_percentage, moderate_pm25_percentage, unhealthy_sensitive_pm25_percentage, unhealthy_pm25_percentage, very_unhealthy_pm25_percentage, hazardous_pm25_percentage,
daily_avg_pm25, daily_max_pm25, daily_min_pm25,
good_pm10_percentage, moderate_pm10_percentage, unhealthy_sensitive_pm10_percentage, unhealthy_pm10_percentage, very_unhealthy_pm10_percentage, hazardous_pm10_percentage,
daily_avg_pm10, daily_max_pm10, daily_min_pm10,
good_voc_percentage, moderate_voc_percentage, unhealthy_sensitive_voc_percentage, unhealthy_voc_percentage, very_unhealthy_voc_percentage, hazardous_voc_percentage,
daily_avg_voc, daily_max_voc, daily_min_voc,
good_co2_percentage, moderate_co2_percentage, unhealthy_sensitive_co2_percentage, unhealthy_co2_percentage, very_unhealthy_co2_percentage, hazardous_co2_percentage,
daily_avg_co2, daily_max_co2, daily_min_co2,
good_ch2o_percentage, moderate_ch2o_percentage, unhealthy_sensitive_ch2o_percentage, unhealthy_ch2o_percentage, very_unhealthy_ch2o_percentage, hazardous_ch2o_percentage,
daily_avg_ch2o, daily_max_ch2o, daily_min_ch2o
FROM final_data
ON CONFLICT (space_uuid, event_date) DO UPDATE
SET
good_aqi_percentage = EXCLUDED.good_aqi_percentage,
moderate_aqi_percentage = EXCLUDED.moderate_aqi_percentage,
unhealthy_sensitive_aqi_percentage = EXCLUDED.unhealthy_sensitive_aqi_percentage,
unhealthy_aqi_percentage = EXCLUDED.unhealthy_aqi_percentage,
very_unhealthy_aqi_percentage = EXCLUDED.very_unhealthy_aqi_percentage,
hazardous_aqi_percentage = EXCLUDED.hazardous_aqi_percentage,
daily_avg_aqi = EXCLUDED.daily_avg_aqi,
daily_max_aqi = EXCLUDED.daily_max_aqi,
daily_min_aqi = EXCLUDED.daily_min_aqi,
good_pm25_percentage = EXCLUDED.good_pm25_percentage,
moderate_pm25_percentage = EXCLUDED.moderate_pm25_percentage,
unhealthy_sensitive_pm25_percentage = EXCLUDED.unhealthy_sensitive_pm25_percentage,
unhealthy_pm25_percentage = EXCLUDED.unhealthy_pm25_percentage,
very_unhealthy_pm25_percentage = EXCLUDED.very_unhealthy_pm25_percentage,
hazardous_pm25_percentage = EXCLUDED.hazardous_pm25_percentage,
daily_avg_pm25 = EXCLUDED.daily_avg_pm25,
daily_max_pm25 = EXCLUDED.daily_max_pm25,
daily_min_pm25 = EXCLUDED.daily_min_pm25,
good_pm10_percentage = EXCLUDED.good_pm10_percentage,
moderate_pm10_percentage = EXCLUDED.moderate_pm10_percentage,
unhealthy_sensitive_pm10_percentage = EXCLUDED.unhealthy_sensitive_pm10_percentage,
unhealthy_pm10_percentage = EXCLUDED.unhealthy_pm10_percentage,
very_unhealthy_pm10_percentage = EXCLUDED.very_unhealthy_pm10_percentage,
hazardous_pm10_percentage = EXCLUDED.hazardous_pm10_percentage,
daily_avg_pm10 = EXCLUDED.daily_avg_pm10,
daily_max_pm10 = EXCLUDED.daily_max_pm10,
daily_min_pm10 = EXCLUDED.daily_min_pm10,
good_voc_percentage = EXCLUDED.good_voc_percentage,
moderate_voc_percentage = EXCLUDED.moderate_voc_percentage,
unhealthy_sensitive_voc_percentage = EXCLUDED.unhealthy_sensitive_voc_percentage,
unhealthy_voc_percentage = EXCLUDED.unhealthy_voc_percentage,
very_unhealthy_voc_percentage = EXCLUDED.very_unhealthy_voc_percentage,
hazardous_voc_percentage = EXCLUDED.hazardous_voc_percentage,
daily_avg_voc = EXCLUDED.daily_avg_voc,
daily_max_voc = EXCLUDED.daily_max_voc,
daily_min_voc = EXCLUDED.daily_min_voc,
good_co2_percentage = EXCLUDED.good_co2_percentage,
moderate_co2_percentage = EXCLUDED.moderate_co2_percentage,
unhealthy_sensitive_co2_percentage = EXCLUDED.unhealthy_sensitive_co2_percentage,
unhealthy_co2_percentage = EXCLUDED.unhealthy_co2_percentage,
very_unhealthy_co2_percentage = EXCLUDED.very_unhealthy_co2_percentage,
hazardous_co2_percentage = EXCLUDED.hazardous_co2_percentage,
daily_avg_co2 = EXCLUDED.daily_avg_co2,
daily_max_co2 = EXCLUDED.daily_max_co2,
daily_min_co2 = EXCLUDED.daily_min_co2,
good_ch2o_percentage = EXCLUDED.good_ch2o_percentage,
moderate_ch2o_percentage = EXCLUDED.moderate_ch2o_percentage,
unhealthy_sensitive_ch2o_percentage = EXCLUDED.unhealthy_sensitive_ch2o_percentage,
unhealthy_ch2o_percentage = EXCLUDED.unhealthy_ch2o_percentage,
very_unhealthy_ch2o_percentage = EXCLUDED.very_unhealthy_ch2o_percentage,
hazardous_ch2o_percentage = EXCLUDED.hazardous_ch2o_percentage,
daily_avg_ch2o = EXCLUDED.daily_avg_ch2o,
daily_max_ch2o = EXCLUDED.daily_max_ch2o,
daily_min_ch2o = EXCLUDED.daily_min_ch2o;

View File

@ -0,0 +1,367 @@
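-- Backfill variant: compute and upsert daily pollutant statistics for every date
-- present in "device-status-log" into "space-daily-pollutant-stats" (no date parameter).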
-- Query Pipeline Starts Here
WITH device_space AS (
SELECT
device.uuid AS device_id,
device.space_device_uuid AS space_id,
"device-status-log".event_time::timestamp AS event_time,
"device-status-log".code,
"device-status-log".value
FROM device
LEFT JOIN "device-status-log"
ON device.uuid = "device-status-log".device_id
LEFT JOIN product
ON product.uuid = device.product_device_uuid
WHERE product.cat_name = 'hjjcy'
),
average_pollutants AS (
SELECT
event_time::date AS event_date,
date_trunc('hour', event_time) AS event_hour,
space_id,
-- PM1
MIN(CASE WHEN code = 'pm1' THEN value::numeric END) AS pm1_min,
AVG(CASE WHEN code = 'pm1' THEN value::numeric END) AS pm1_avg,
MAX(CASE WHEN code = 'pm1' THEN value::numeric END) AS pm1_max,
-- PM25
MIN(CASE WHEN code = 'pm25_value' THEN value::numeric END) AS pm25_min,
AVG(CASE WHEN code = 'pm25_value' THEN value::numeric END) AS pm25_avg,
MAX(CASE WHEN code = 'pm25_value' THEN value::numeric END) AS pm25_max,
-- PM10
MIN(CASE WHEN code = 'pm10' THEN value::numeric END) AS pm10_min,
AVG(CASE WHEN code = 'pm10' THEN value::numeric END) AS pm10_avg,
MAX(CASE WHEN code = 'pm10' THEN value::numeric END) AS pm10_max,
-- VOC
MIN(CASE WHEN code = 'voc_value' THEN value::numeric END) AS voc_min,
AVG(CASE WHEN code = 'voc_value' THEN value::numeric END) AS voc_avg,
MAX(CASE WHEN code = 'voc_value' THEN value::numeric END) AS voc_max,
-- CH2O
MIN(CASE WHEN code = 'ch2o_value' THEN value::numeric END) AS ch2o_min,
AVG(CASE WHEN code = 'ch2o_value' THEN value::numeric END) AS ch2o_avg,
MAX(CASE WHEN code = 'ch2o_value' THEN value::numeric END) AS ch2o_max,
-- CO2
MIN(CASE WHEN code = 'co2_value' THEN value::numeric END) AS co2_min,
AVG(CASE WHEN code = 'co2_value' THEN value::numeric END) AS co2_avg,
MAX(CASE WHEN code = 'co2_value' THEN value::numeric END) AS co2_max
FROM device_space
GROUP BY space_id, event_hour, event_date
),
filled_pollutants AS (
SELECT
*,
-- AVG
COALESCE(pm25_avg, LAG(pm25_avg) OVER (PARTITION BY space_id ORDER BY event_hour)) AS pm25_avg_f,
COALESCE(pm10_avg, LAG(pm10_avg) OVER (PARTITION BY space_id ORDER BY event_hour)) AS pm10_avg_f,
COALESCE(voc_avg, LAG(voc_avg) OVER (PARTITION BY space_id ORDER BY event_hour)) AS voc_avg_f,
COALESCE(co2_avg, LAG(co2_avg) OVER (PARTITION BY space_id ORDER BY event_hour)) AS co2_avg_f,
COALESCE(ch2o_avg, LAG(ch2o_avg) OVER (PARTITION BY space_id ORDER BY event_hour)) AS ch2o_avg_f,
-- MIN
COALESCE(pm25_min, LAG(pm25_min) OVER (PARTITION BY space_id ORDER BY event_hour)) AS pm25_min_f,
COALESCE(pm10_min, LAG(pm10_min) OVER (PARTITION BY space_id ORDER BY event_hour)) AS pm10_min_f,
COALESCE(voc_min, LAG(voc_min) OVER (PARTITION BY space_id ORDER BY event_hour)) AS voc_min_f,
COALESCE(co2_min, LAG(co2_min) OVER (PARTITION BY space_id ORDER BY event_hour)) AS co2_min_f,
COALESCE(ch2o_min, LAG(ch2o_min) OVER (PARTITION BY space_id ORDER BY event_hour)) AS ch2o_min_f,
-- MAX
COALESCE(pm25_max, LAG(pm25_max) OVER (PARTITION BY space_id ORDER BY event_hour)) AS pm25_max_f,
COALESCE(pm10_max, LAG(pm10_max) OVER (PARTITION BY space_id ORDER BY event_hour)) AS pm10_max_f,
COALESCE(voc_max, LAG(voc_max) OVER (PARTITION BY space_id ORDER BY event_hour)) AS voc_max_f,
COALESCE(co2_max, LAG(co2_max) OVER (PARTITION BY space_id ORDER BY event_hour)) AS co2_max_f,
COALESCE(ch2o_max, LAG(ch2o_max) OVER (PARTITION BY space_id ORDER BY event_hour)) AS ch2o_max_f
FROM average_pollutants
),
hourly_results AS (
SELECT
space_id,
event_date,
event_hour,
pm1_min, pm1_avg, pm1_max,
pm25_min_f, pm25_avg_f, pm25_max_f,
pm10_min_f, pm10_avg_f, pm10_max_f,
voc_min_f, voc_avg_f, voc_max_f,
co2_min_f, co2_avg_f, co2_max_f,
ch2o_min_f, ch2o_avg_f, ch2o_max_f,
GREATEST(
calculate_aqi('pm25', pm25_min_f),
calculate_aqi('pm10', pm10_min_f)
) AS hourly_min_aqi,
GREATEST(
calculate_aqi('pm25', pm25_avg_f),
calculate_aqi('pm10', pm10_avg_f)
) AS hourly_avg_aqi,
GREATEST(
calculate_aqi('pm25', pm25_max_f),
calculate_aqi('pm10', pm10_max_f)
) AS hourly_max_aqi,
classify_aqi(GREATEST(
calculate_aqi('pm25', pm25_avg_f),
calculate_aqi('pm10', pm10_avg_f)
)) AS aqi_category,
classify_aqi(calculate_aqi('pm25',pm25_avg_f)) as pm25_category,
classify_aqi(calculate_aqi('pm10',pm10_avg_f)) as pm10_category,
classify_aqi(calculate_aqi('voc',voc_avg_f)) as voc_category,
classify_aqi(calculate_aqi('co2',co2_avg_f)) as co2_category,
classify_aqi(calculate_aqi('ch2o',ch2o_avg_f)) as ch2o_category
FROM filled_pollutants
),
daily_category_counts AS (
SELECT space_id, event_date, aqi_category AS category, 'aqi' AS pollutant, COUNT(*) AS category_count
FROM hourly_results
GROUP BY space_id, event_date, aqi_category
UNION ALL
SELECT space_id, event_date, pm25_category AS category, 'pm25' AS pollutant, COUNT(*) AS category_count
FROM hourly_results
GROUP BY space_id, event_date, pm25_category
UNION ALL
SELECT space_id, event_date, pm10_category AS category, 'pm10' AS pollutant, COUNT(*) AS category_count
FROM hourly_results
GROUP BY space_id, event_date, pm10_category
UNION ALL
SELECT space_id, event_date, voc_category AS category, 'voc' AS pollutant, COUNT(*) AS category_count
FROM hourly_results
GROUP BY space_id, event_date, voc_category
UNION ALL
SELECT space_id, event_date, co2_category AS category, 'co2' AS pollutant, COUNT(*) AS category_count
FROM hourly_results
GROUP BY space_id, event_date, co2_category
UNION ALL
SELECT space_id, event_date, ch2o_category AS category, 'ch2o' AS pollutant, COUNT(*) AS category_count
FROM hourly_results
GROUP BY space_id, event_date, ch2o_category
),
daily_totals AS (
SELECT
space_id,
event_date,
SUM(category_count) AS total_count
FROM daily_category_counts
WHERE pollutant = 'aqi'
GROUP BY space_id, event_date
),
-- Pivot Categories into Columns
daily_percentages AS (
SELECT
dt.space_id,
dt.event_date,
-- AQI CATEGORIES
ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Good' and dcc.pollutant = 'aqi' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS good_aqi_percentage,
ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Moderate' and dcc.pollutant = 'aqi' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS moderate_aqi_percentage,
ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Unhealthy for Sensitive Groups' and dcc.pollutant = 'aqi' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS unhealthy_sensitive_aqi_percentage,
ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Unhealthy' and dcc.pollutant = 'aqi' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS unhealthy_aqi_percentage,
ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Very Unhealthy' and dcc.pollutant = 'aqi' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS very_unhealthy_aqi_percentage,
ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Hazardous' and dcc.pollutant = 'aqi' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS hazardous_aqi_percentage,
-- PM25 CATEGORIES
ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Good' and dcc.pollutant = 'pm25' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS good_pm25_percentage,
ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Moderate' and dcc.pollutant = 'pm25' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS moderate_pm25_percentage,
ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Unhealthy for Sensitive Groups' and dcc.pollutant = 'pm25' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS unhealthy_sensitive_pm25_percentage,
ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Unhealthy' and dcc.pollutant = 'pm25' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS unhealthy_pm25_percentage,
ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Very Unhealthy' and dcc.pollutant = 'pm25' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS very_unhealthy_pm25_percentage,
ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Hazardous' and dcc.pollutant = 'pm25' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS hazardous_pm25_percentage,
-- PM10 CATEGORIES
ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Good' and dcc.pollutant = 'pm10' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS good_pm10_percentage,
ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Moderate' and dcc.pollutant = 'pm10' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS moderate_pm10_percentage,
ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Unhealthy for Sensitive Groups' and dcc.pollutant = 'pm10' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS unhealthy_sensitive_pm10_percentage,
ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Unhealthy' and dcc.pollutant = 'pm10' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS unhealthy_pm10_percentage,
ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Very Unhealthy' and dcc.pollutant = 'pm10' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS very_unhealthy_pm10_percentage,
ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Hazardous' and dcc.pollutant = 'pm10' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS hazardous_pm10_percentage,
-- VOC CATEGORIES
ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Good' and dcc.pollutant = 'voc' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS good_voc_percentage,
ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Moderate' and dcc.pollutant = 'voc' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS moderate_voc_percentage,
ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Unhealthy for Sensitive Groups' and dcc.pollutant = 'voc' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS unhealthy_sensitive_voc_percentage,
ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Unhealthy' and dcc.pollutant = 'voc' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS unhealthy_voc_percentage,
ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Very Unhealthy' and dcc.pollutant = 'voc' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS very_unhealthy_voc_percentage,
ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Hazardous' and dcc.pollutant = 'voc' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS hazardous_voc_percentage,
-- CO2 CATEGORIES
ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Good' and dcc.pollutant = 'co2' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS good_co2_percentage,
ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Moderate' and dcc.pollutant = 'co2' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS moderate_co2_percentage,
ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Unhealthy for Sensitive Groups' and dcc.pollutant = 'co2' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS unhealthy_sensitive_co2_percentage,
ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Unhealthy' and dcc.pollutant = 'co2' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS unhealthy_co2_percentage,
ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Very Unhealthy' and dcc.pollutant = 'co2' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS very_unhealthy_co2_percentage,
ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Hazardous' and dcc.pollutant = 'co2' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS hazardous_co2_percentage,
-- CH2O CATEGORIES
ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Good' and dcc.pollutant = 'ch2o' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS good_ch2o_percentage,
ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Moderate' and dcc.pollutant = 'ch2o' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS moderate_ch2o_percentage,
ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Unhealthy for Sensitive Groups' and dcc.pollutant = 'ch2o' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS unhealthy_sensitive_ch2o_percentage,
ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Unhealthy' and dcc.pollutant = 'ch2o' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS unhealthy_ch2o_percentage,
ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Very Unhealthy' and dcc.pollutant = 'ch2o' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS very_unhealthy_ch2o_percentage,
ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Hazardous' and dcc.pollutant = 'ch2o' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS hazardous_ch2o_percentage
FROM daily_totals dt
LEFT JOIN daily_category_counts dcc
ON dt.space_id = dcc.space_id AND dt.event_date = dcc.event_date
GROUP BY dt.space_id, dt.event_date, dt.total_count
),
daily_averages AS (
SELECT
space_id,
event_date,
-- AQI
ROUND(AVG(hourly_min_aqi)::numeric, 2) AS daily_min_aqi,
ROUND(AVG(hourly_avg_aqi)::numeric, 2) AS daily_avg_aqi,
ROUND(AVG(hourly_max_aqi)::numeric, 2) AS daily_max_aqi,
-- PM25
ROUND(AVG(pm25_min_f)::numeric, 2) AS daily_min_pm25,
ROUND(AVG(pm25_avg_f)::numeric, 2) AS daily_avg_pm25,
ROUND(AVG(pm25_max_f)::numeric, 2) AS daily_max_pm25,
-- PM10
ROUND(AVG(pm10_min_f)::numeric, 2) AS daily_min_pm10,
ROUND(AVG(pm10_avg_f)::numeric, 2) AS daily_avg_pm10,
ROUND(AVG(pm10_max_f)::numeric, 2) AS daily_max_pm10,
-- VOC
ROUND(AVG(voc_min_f)::numeric, 2) AS daily_min_voc,
ROUND(AVG(voc_avg_f)::numeric, 2) AS daily_avg_voc,
ROUND(AVG(voc_max_f)::numeric, 2) AS daily_max_voc,
-- CO2
ROUND(AVG(co2_min_f)::numeric, 2) AS daily_min_co2,
ROUND(AVG(co2_avg_f)::numeric, 2) AS daily_avg_co2,
ROUND(AVG(co2_max_f)::numeric, 2) AS daily_max_co2,
-- CH2O
ROUND(AVG(ch2o_min_f)::numeric, 2) AS daily_min_ch2o,
ROUND(AVG(ch2o_avg_f)::numeric, 2) AS daily_avg_ch2o,
ROUND(AVG(ch2o_max_f)::numeric, 2) AS daily_max_ch2o
FROM hourly_results
GROUP BY space_id, event_date
),
final_data as(
SELECT
p.space_id,
p.event_date,
p.good_aqi_percentage, p.moderate_aqi_percentage, p.unhealthy_sensitive_aqi_percentage, p.unhealthy_aqi_percentage, p.very_unhealthy_aqi_percentage, p.hazardous_aqi_percentage,
a.daily_avg_aqi,a.daily_max_aqi, a.daily_min_aqi,
p.good_pm25_percentage, p.moderate_pm25_percentage, p.unhealthy_sensitive_pm25_percentage, p.unhealthy_pm25_percentage, p.very_unhealthy_pm25_percentage, p.hazardous_pm25_percentage,
a.daily_avg_pm25,a.daily_max_pm25, a.daily_min_pm25,
p.good_pm10_percentage, p.moderate_pm10_percentage, p.unhealthy_sensitive_pm10_percentage, p.unhealthy_pm10_percentage, p.very_unhealthy_pm10_percentage, p.hazardous_pm10_percentage,
a.daily_avg_pm10, a.daily_max_pm10, a.daily_min_pm10,
p.good_voc_percentage, p.moderate_voc_percentage, p.unhealthy_sensitive_voc_percentage, p.unhealthy_voc_percentage, p.very_unhealthy_voc_percentage, p.hazardous_voc_percentage,
a.daily_avg_voc, a.daily_max_voc, a.daily_min_voc,
p.good_co2_percentage, p.moderate_co2_percentage, p.unhealthy_sensitive_co2_percentage, p.unhealthy_co2_percentage, p.very_unhealthy_co2_percentage, p.hazardous_co2_percentage,
a.daily_avg_co2,a.daily_max_co2, a.daily_min_co2,
p.good_ch2o_percentage, p.moderate_ch2o_percentage, p.unhealthy_sensitive_ch2o_percentage, p.unhealthy_ch2o_percentage, p.very_unhealthy_ch2o_percentage, p.hazardous_ch2o_percentage,
a.daily_avg_ch2o,a.daily_max_ch2o, a.daily_min_ch2o
FROM daily_percentages p
LEFT JOIN daily_averages a
ON p.space_id = a.space_id AND p.event_date = a.event_date
ORDER BY p.space_id, p.event_date)
INSERT INTO public."space-daily-pollutant-stats" (
space_uuid,
event_date,
good_aqi_percentage, moderate_aqi_percentage, unhealthy_sensitive_aqi_percentage, unhealthy_aqi_percentage, very_unhealthy_aqi_percentage, hazardous_aqi_percentage,
daily_avg_aqi, daily_max_aqi, daily_min_aqi,
good_pm25_percentage, moderate_pm25_percentage, unhealthy_sensitive_pm25_percentage, unhealthy_pm25_percentage, very_unhealthy_pm25_percentage, hazardous_pm25_percentage,
daily_avg_pm25, daily_max_pm25, daily_min_pm25,
good_pm10_percentage, moderate_pm10_percentage, unhealthy_sensitive_pm10_percentage, unhealthy_pm10_percentage, very_unhealthy_pm10_percentage, hazardous_pm10_percentage,
daily_avg_pm10, daily_max_pm10, daily_min_pm10,
good_voc_percentage, moderate_voc_percentage, unhealthy_sensitive_voc_percentage, unhealthy_voc_percentage, very_unhealthy_voc_percentage, hazardous_voc_percentage,
daily_avg_voc, daily_max_voc, daily_min_voc,
good_co2_percentage, moderate_co2_percentage, unhealthy_sensitive_co2_percentage, unhealthy_co2_percentage, very_unhealthy_co2_percentage, hazardous_co2_percentage,
daily_avg_co2, daily_max_co2, daily_min_co2,
good_ch2o_percentage, moderate_ch2o_percentage, unhealthy_sensitive_ch2o_percentage, unhealthy_ch2o_percentage, very_unhealthy_ch2o_percentage, hazardous_ch2o_percentage,
daily_avg_ch2o, daily_max_ch2o, daily_min_ch2o
)
SELECT
space_id,
event_date,
good_aqi_percentage, moderate_aqi_percentage, unhealthy_sensitive_aqi_percentage, unhealthy_aqi_percentage, very_unhealthy_aqi_percentage, hazardous_aqi_percentage,
daily_avg_aqi, daily_max_aqi, daily_min_aqi,
good_pm25_percentage, moderate_pm25_percentage, unhealthy_sensitive_pm25_percentage, unhealthy_pm25_percentage, very_unhealthy_pm25_percentage, hazardous_pm25_percentage,
daily_avg_pm25, daily_max_pm25, daily_min_pm25,
good_pm10_percentage, moderate_pm10_percentage, unhealthy_sensitive_pm10_percentage, unhealthy_pm10_percentage, very_unhealthy_pm10_percentage, hazardous_pm10_percentage,
daily_avg_pm10, daily_max_pm10, daily_min_pm10,
good_voc_percentage, moderate_voc_percentage, unhealthy_sensitive_voc_percentage, unhealthy_voc_percentage, very_unhealthy_voc_percentage, hazardous_voc_percentage,
daily_avg_voc, daily_max_voc, daily_min_voc,
good_co2_percentage, moderate_co2_percentage, unhealthy_sensitive_co2_percentage, unhealthy_co2_percentage, very_unhealthy_co2_percentage, hazardous_co2_percentage,
daily_avg_co2, daily_max_co2, daily_min_co2,
good_ch2o_percentage, moderate_ch2o_percentage, unhealthy_sensitive_ch2o_percentage, unhealthy_ch2o_percentage, very_unhealthy_ch2o_percentage, hazardous_ch2o_percentage,
daily_avg_ch2o, daily_max_ch2o, daily_min_ch2o
FROM final_data
ON CONFLICT (space_uuid, event_date) DO UPDATE
SET
good_aqi_percentage = EXCLUDED.good_aqi_percentage,
moderate_aqi_percentage = EXCLUDED.moderate_aqi_percentage,
unhealthy_sensitive_aqi_percentage = EXCLUDED.unhealthy_sensitive_aqi_percentage,
unhealthy_aqi_percentage = EXCLUDED.unhealthy_aqi_percentage,
very_unhealthy_aqi_percentage = EXCLUDED.very_unhealthy_aqi_percentage,
hazardous_aqi_percentage = EXCLUDED.hazardous_aqi_percentage,
daily_avg_aqi = EXCLUDED.daily_avg_aqi,
daily_max_aqi = EXCLUDED.daily_max_aqi,
daily_min_aqi = EXCLUDED.daily_min_aqi,
good_pm25_percentage = EXCLUDED.good_pm25_percentage,
moderate_pm25_percentage = EXCLUDED.moderate_pm25_percentage,
unhealthy_sensitive_pm25_percentage = EXCLUDED.unhealthy_sensitive_pm25_percentage,
unhealthy_pm25_percentage = EXCLUDED.unhealthy_pm25_percentage,
very_unhealthy_pm25_percentage = EXCLUDED.very_unhealthy_pm25_percentage,
hazardous_pm25_percentage = EXCLUDED.hazardous_pm25_percentage,
daily_avg_pm25 = EXCLUDED.daily_avg_pm25,
daily_max_pm25 = EXCLUDED.daily_max_pm25,
daily_min_pm25 = EXCLUDED.daily_min_pm25,
good_pm10_percentage = EXCLUDED.good_pm10_percentage,
moderate_pm10_percentage = EXCLUDED.moderate_pm10_percentage,
unhealthy_sensitive_pm10_percentage = EXCLUDED.unhealthy_sensitive_pm10_percentage,
unhealthy_pm10_percentage = EXCLUDED.unhealthy_pm10_percentage,
very_unhealthy_pm10_percentage = EXCLUDED.very_unhealthy_pm10_percentage,
hazardous_pm10_percentage = EXCLUDED.hazardous_pm10_percentage,
daily_avg_pm10 = EXCLUDED.daily_avg_pm10,
daily_max_pm10 = EXCLUDED.daily_max_pm10,
daily_min_pm10 = EXCLUDED.daily_min_pm10,
good_voc_percentage = EXCLUDED.good_voc_percentage,
moderate_voc_percentage = EXCLUDED.moderate_voc_percentage,
unhealthy_sensitive_voc_percentage = EXCLUDED.unhealthy_sensitive_voc_percentage,
unhealthy_voc_percentage = EXCLUDED.unhealthy_voc_percentage,
very_unhealthy_voc_percentage = EXCLUDED.very_unhealthy_voc_percentage,
hazardous_voc_percentage = EXCLUDED.hazardous_voc_percentage,
daily_avg_voc = EXCLUDED.daily_avg_voc,
daily_max_voc = EXCLUDED.daily_max_voc,
daily_min_voc = EXCLUDED.daily_min_voc,
good_co2_percentage = EXCLUDED.good_co2_percentage,
moderate_co2_percentage = EXCLUDED.moderate_co2_percentage,
unhealthy_sensitive_co2_percentage = EXCLUDED.unhealthy_sensitive_co2_percentage,
unhealthy_co2_percentage = EXCLUDED.unhealthy_co2_percentage,
very_unhealthy_co2_percentage = EXCLUDED.very_unhealthy_co2_percentage,
hazardous_co2_percentage = EXCLUDED.hazardous_co2_percentage,
daily_avg_co2 = EXCLUDED.daily_avg_co2,
daily_max_co2 = EXCLUDED.daily_max_co2,
daily_min_co2 = EXCLUDED.daily_min_co2,
good_ch2o_percentage = EXCLUDED.good_ch2o_percentage,
moderate_ch2o_percentage = EXCLUDED.moderate_ch2o_percentage,
unhealthy_sensitive_ch2o_percentage = EXCLUDED.unhealthy_sensitive_ch2o_percentage,
unhealthy_ch2o_percentage = EXCLUDED.unhealthy_ch2o_percentage,
very_unhealthy_ch2o_percentage = EXCLUDED.very_unhealthy_ch2o_percentage,
hazardous_ch2o_percentage = EXCLUDED.hazardous_ch2o_percentage,
daily_avg_ch2o = EXCLUDED.daily_avg_ch2o,
daily_max_ch2o = EXCLUDED.daily_max_ch2o,
daily_min_ch2o = EXCLUDED.daily_min_ch2o;

View File

@ -0,0 +1,110 @@
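-- Backfill daily occupancy: derive occupied seconds and occupancy percentage per space
-- from 'presence_state' logs and upsert into "space-daily-occupancy-duration" for all dates.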
WITH presence_logs AS (
SELECT
d.space_device_uuid AS space_id,
l.device_id,
l.event_time,
l.value,
LAG(l.event_time) OVER (PARTITION BY l.device_id ORDER BY l.event_time) AS prev_time,
LAG(l.value) OVER (PARTITION BY l.device_id ORDER BY l.event_time) AS prev_value
FROM device d
JOIN "device-status-log" l ON d.uuid = l.device_id
JOIN product p ON p.uuid = d.product_device_uuid
WHERE l.code = 'presence_state'
AND p.cat_name = 'hps'
),
-- Intervals when device was in 'presence' (between prev_time and event_time when value='none')
presence_intervals AS (
SELECT
space_id,
prev_time AS start_time,
event_time AS end_time
FROM presence_logs
WHERE value = 'none'
AND prev_value = 'presence'
AND prev_time IS NOT NULL
),
-- Split intervals across days
split_intervals AS (
SELECT
space_id,
generate_series(
date_trunc('day', start_time),
date_trunc('day', end_time),
interval '1 day'
)::date AS event_date,
GREATEST(start_time, date_trunc('day', start_time)) AS interval_start,
LEAST(end_time, date_trunc('day', end_time) + interval '1 day') AS interval_end
FROM presence_intervals
),
-- Mark and group overlapping intervals per space per day
ordered_intervals AS (
SELECT
space_id,
event_date,
interval_start,
interval_end,
LAG(interval_end) OVER (PARTITION BY space_id, event_date ORDER BY interval_start) AS prev_end
FROM split_intervals
),
grouped_intervals AS (
SELECT *,
SUM(CASE
WHEN prev_end IS NULL OR interval_start > prev_end THEN 1
ELSE 0
END) OVER (PARTITION BY space_id, event_date ORDER BY interval_start) AS grp
FROM ordered_intervals
),
-- Merge overlapping intervals per group
merged_intervals AS (
SELECT
space_id,
event_date,
MIN(interval_start) AS merged_start,
MAX(interval_end) AS merged_end
FROM grouped_intervals
GROUP BY space_id, event_date, grp
),
-- Sum durations of merged intervals
summed_intervals AS (
SELECT
space_id,
event_date,
SUM(EXTRACT(EPOCH FROM (merged_end - merged_start))) AS raw_occupied_seconds
FROM merged_intervals
GROUP BY space_id, event_date
),
final_data AS (
SELECT
space_id,
event_date,
LEAST(raw_occupied_seconds, 86400) AS occupied_seconds,
ROUND(LEAST(raw_occupied_seconds, 86400) / 86400.0 * 100, 2) AS occupancy_percentage
FROM summed_intervals
ORDER BY space_id, event_date)
INSERT INTO public."space-daily-occupancy-duration" (
space_uuid,
event_date,
occupied_seconds,
occupancy_percentage
)
SELECT space_id,
event_date,
occupied_seconds,
occupancy_percentage
FROM final_data
ON CONFLICT (space_uuid, event_date) DO UPDATE
SET
occupancy_percentage = EXCLUDED.occupancy_percentage,
occupied_seconds = EXCLUDED.occupied_seconds;

View File

@ -0,0 +1,17 @@
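-- Fetch daily occupancy duration and percentage for a space and month.
-- $1: space UUID, $2: month as 'YYYY-MM'.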
WITH params AS (
SELECT
$1::uuid AS space_uuid,
TO_DATE(NULLIF($2, ''), 'YYYY-MM') AS event_month
)
SELECT sdo.space_uuid,
event_date,
occupancy_percentage,
occupied_seconds
FROM public."space-daily-occupancy-duration" as sdo
JOIN params P ON true
WHERE (P.space_uuid IS NULL OR sdo.space_uuid = P.space_uuid)
  AND (P.event_month IS NULL OR TO_CHAR(sdo.event_date, 'YYYY-MM') = TO_CHAR(P.event_month, 'YYYY-MM'))
ORDER BY sdo.space_uuid, sdo.event_date;

View File

@ -0,0 +1,107 @@
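-- Compute occupied seconds and occupancy percentage per space for the single day
-- given as $1 ('YYYY-MM-DD') and upsert into "space-daily-occupancy-duration".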
WITH params AS (
SELECT
TO_DATE(NULLIF($1, ''), 'YYYY-MM-DD') AS event_date
),
presence_logs AS (
SELECT
d.space_device_uuid AS space_id,
l.device_id,
l.event_time,
l.value,
LAG(l.event_time) OVER (PARTITION BY l.device_id ORDER BY l.event_time) AS prev_time
FROM device d
JOIN "device-status-log" l ON d.uuid = l.device_id
JOIN product p ON p.uuid = d.product_device_uuid
WHERE l.code = 'presence_state'
AND p.cat_name = 'hps'
),
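-- Intervals that end when the sensor reports 'none'; the time since the previous report counts as occupied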
presence_intervals AS (
SELECT
space_id,
prev_time AS start_time,
event_time AS end_time
FROM presence_logs
WHERE value = 'none' AND prev_time IS NOT NULL
),
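-- Split intervals that cross midnight so each day gets its own share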
split_intervals AS (
SELECT
space_id,
generate_series(
date_trunc('day', start_time),
date_trunc('day', end_time),
interval '1 day'
)::date AS event_date,
GREATEST(start_time, date_trunc('day', start_time)) AS interval_start,
LEAST(end_time, date_trunc('day', end_time) + INTERVAL '1 day') AS interval_end
FROM presence_intervals
),
ordered_intervals AS (
SELECT
space_id,
event_date,
interval_start,
interval_end,
LAG(interval_end) OVER (PARTITION BY space_id, event_date ORDER BY interval_start) AS prev_end
FROM split_intervals
),
grouped_intervals AS (
SELECT *,
SUM(CASE
WHEN prev_end IS NULL OR interval_start > prev_end THEN 1
ELSE 0
END) OVER (PARTITION BY space_id, event_date ORDER BY interval_start) AS grp
FROM ordered_intervals
),
merged_intervals AS (
SELECT
space_id,
event_date,
MIN(interval_start) AS merged_start,
MAX(interval_end) AS merged_end
FROM grouped_intervals
GROUP BY space_id, event_date, grp
),
summed_intervals AS (
SELECT
space_id,
event_date,
SUM(EXTRACT(EPOCH FROM (merged_end - merged_start))) AS raw_occupied_seconds
FROM merged_intervals
GROUP BY space_id, event_date
),
final_data AS (
SELECT
s.space_id,
s.event_date,
LEAST(raw_occupied_seconds, 86400) AS occupied_seconds,
ROUND(LEAST(raw_occupied_seconds, 86400) / 86400.0 * 100, 2) AS occupancy_percentage
FROM summed_intervals s
JOIN params p
ON p.event_date = s.event_date
)
INSERT INTO public."space-daily-occupancy-duration" (
space_uuid,
event_date,
occupied_seconds,
occupancy_percentage
)
SELECT
space_id,
event_date,
occupied_seconds,
occupancy_percentage
FROM final_data
ON CONFLICT (space_uuid, event_date) DO UPDATE
SET
occupancy_percentage = EXCLUDED.occupancy_percentage,
occupied_seconds = EXCLUDED.occupied_seconds;
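-- Note: an empty $1 leaves params.event_date NULL, and the join inside final_data then matches
-- no rows, so nothing is upserted for that run; presumably the caller always passes an explicit
-- 'YYYY-MM-DD' date.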

View File

@ -1,7 +1,6 @@
WITH params AS (
SELECT
$1::uuid AS device_id,
$2::date AS target_date
$1::date AS target_date
),
total_energy AS (
SELECT
@ -14,8 +13,7 @@ total_energy AS (
MAX(log.value)::integer AS max_value
FROM "device-status-log" log, params
WHERE log.code = 'EnergyConsumed'
AND log.device_id = params.device_id
AND log.event_time::date = params.target_date
AND log.event_time::date = params.target_date
GROUP BY 1,2,3,4,5
),
energy_phase_A AS (
@ -29,8 +27,7 @@ energy_phase_A AS (
MAX(log.value)::integer AS max_value
FROM "device-status-log" log, params
WHERE log.code = 'EnergyConsumedA'
AND log.device_id = params.device_id
AND log.event_time::date = params.target_date
AND log.event_time::date = params.target_date
GROUP BY 1,2,3,4,5
),
energy_phase_B AS (
@ -44,8 +41,7 @@ energy_phase_B AS (
MAX(log.value)::integer AS max_value
FROM "device-status-log" log, params
WHERE log.code = 'EnergyConsumedB'
AND log.device_id = params.device_id
AND log.event_time::date = params.target_date
AND log.event_time::date = params.target_date
GROUP BY 1,2,3,4,5
),
energy_phase_C AS (
@ -59,8 +55,7 @@ energy_phase_C AS (
MAX(log.value)::integer AS max_value
FROM "device-status-log" log, params
WHERE log.code = 'EnergyConsumedC'
AND log.device_id = params.device_id
AND log.event_time::date = params.target_date
AND log.event_time::date = params.target_date
GROUP BY 1,2,3,4,5
),
final_data AS (

View File

@ -1,8 +1,6 @@
WITH params AS (
SELECT
$1::uuid AS device_id,
$2::date AS target_date,
$3::text AS target_hour
$1::date AS target_date
),
total_energy AS (
SELECT
@ -15,9 +13,7 @@ total_energy AS (
MAX(log.value)::integer AS max_value
FROM "device-status-log" log, params
WHERE log.code = 'EnergyConsumed'
AND log.device_id = params.device_id
AND log.event_time::date = params.target_date
AND EXTRACT(HOUR FROM log.event_time)::text = params.target_hour
GROUP BY 1,2,3,4,5
),
energy_phase_A AS (
@ -31,9 +27,7 @@ energy_phase_A AS (
MAX(log.value)::integer AS max_value
FROM "device-status-log" log, params
WHERE log.code = 'EnergyConsumedA'
AND log.device_id = params.device_id
AND log.event_time::date = params.target_date
AND EXTRACT(HOUR FROM log.event_time)::text = params.target_hour
GROUP BY 1,2,3,4,5
),
energy_phase_B AS (
@ -47,9 +41,7 @@ energy_phase_B AS (
MAX(log.value)::integer AS max_value
FROM "device-status-log" log, params
WHERE log.code = 'EnergyConsumedB'
AND log.device_id = params.device_id
AND log.event_time::date = params.target_date
AND EXTRACT(HOUR FROM log.event_time)::text = params.target_hour
GROUP BY 1,2,3,4,5
),
energy_phase_C AS (
@ -63,9 +55,7 @@ energy_phase_C AS (
MAX(log.value)::integer AS max_value
FROM "device-status-log" log, params
WHERE log.code = 'EnergyConsumedC'
AND log.device_id = params.device_id
AND log.event_time::date = params.target_date
AND EXTRACT(HOUR FROM log.event_time)::text = params.target_hour
GROUP BY 1,2,3,4,5
),
final_data AS (

View File

@ -1,7 +1,6 @@
WITH params AS (
SELECT
$1::uuid AS device_id,
$2::text AS target_month -- Format should match 'MM-YYYY'
$1::text AS target_month -- Format should match 'MM-YYYY'
),
total_energy AS (
SELECT
@ -14,7 +13,6 @@ total_energy AS (
MAX(log.value)::integer AS max_value
FROM "device-status-log" log, params
WHERE log.code = 'EnergyConsumed'
AND log.device_id = params.device_id
AND TO_CHAR(log.event_time, 'MM-YYYY') = params.target_month
GROUP BY 1,2,3,4,5
),
@ -29,7 +27,6 @@ energy_phase_A AS (
MAX(log.value)::integer AS max_value
FROM "device-status-log" log, params
WHERE log.code = 'EnergyConsumedA'
AND log.device_id = params.device_id
AND TO_CHAR(log.event_time, 'MM-YYYY') = params.target_month
GROUP BY 1,2,3,4,5
),
@ -44,7 +41,6 @@ energy_phase_B AS (
MAX(log.value)::integer AS max_value
FROM "device-status-log" log, params
WHERE log.code = 'EnergyConsumedB'
AND log.device_id = params.device_id
AND TO_CHAR(log.event_time, 'MM-YYYY') = params.target_month
GROUP BY 1,2,3,4,5
),
@ -59,7 +55,6 @@ energy_phase_C AS (
MAX(log.value)::integer AS max_value
FROM "device-status-log" log, params
WHERE log.code = 'EnergyConsumedC'
AND log.device_id = params.device_id
AND TO_CHAR(log.event_time, 'MM-YYYY') = params.target_month
GROUP BY 1,2,3,4,5
),

View File

@ -11,9 +11,10 @@ WITH params AS (
A.count_motion_detected,
A.count_presence_detected,
A.count_total_presence_detected
FROM public."presence-sensor-daily-detection" AS A
FROM public."presence-sensor-daily-device-detection" AS A
JOIN params P ON TRUE
WHERE A.device_uuid::text = ANY(P.device_ids)
AND (P.month IS NULL
OR date_trunc('month', A.event_date) = P.month
)
);

View File

@ -0,0 +1,21 @@
WITH params AS (
SELECT
TO_DATE(NULLIF($2, ''), 'YYYY') AS year,
string_to_array(NULLIF($4, ''), ',') AS device_ids
)
SELECT
A.device_uuid,
TO_CHAR(date_trunc('month', A.event_date), 'YYYY-MM') AS event_month,
SUM(A.count_motion_detected) AS total_motion_detected,
SUM(A.count_presence_detected) AS total_presence_detected,
SUM(A.count_total_presence_detected) AS total_overall_presence
FROM public."presence-sensor-daily-device-detection" AS A
JOIN params P ON TRUE
WHERE A.device_uuid::text = ANY(P.device_ids)
AND (
P.year IS NULL
OR date_trunc('year', A.event_date) = P.year
)
GROUP BY 1,2
ORDER BY 1,2;
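-- Note: only $2 (a 'YYYY' year) and $4 (a comma-separated device uuid list) are referenced here;
-- $1 and $3 are presumably placeholders so the statement can share one parameter array with the
-- monthly/daily variants of this report.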

View File

@ -30,7 +30,6 @@ device_logs AS (
WHERE product.cat_name = 'hps'
AND "device-status-log".code = 'presence_state'
AND device.uuid::text = P.device_id
AND (P.event_date IS NULL OR "device-status-log".event_time::date = P.event_date)
),
presence_detection AS (
@ -56,6 +55,8 @@ presence_detection_summary AS (
SUM(motion_detected + presence_detected) AS count_total_presence_detected
FROM presence_detection pd
LEFT JOIN device d ON d.uuid = pd.device_id
JOIN params P ON TRUE
AND (P.event_date IS NULL OR pd.event_time::date = P.event_date)
GROUP BY 1, 2, 3, 4, 5
),
@ -93,7 +94,7 @@ daily_aggregates AS (
GROUP BY device_id, event_date
)
INSERT INTO public."presence-sensor-daily-detection" (
INSERT INTO public."presence-sensor-daily-device-detection" (
device_uuid,
event_date,
count_motion_detected,

View File

@ -0,0 +1,20 @@
WITH params AS (
SELECT
TO_DATE(NULLIF($1, ''), 'MM-YYYY') AS month,
string_to_array(NULLIF($2, ''), ',') AS device_ids
)
SELECT
A.date,
SUM(A.energy_consumed_kW::numeric) AS total_energy_consumed_KW,
SUM(A.energy_consumed_A::numeric) AS total_energy_consumed_A,
SUM(A.energy_consumed_B::numeric) AS total_energy_consumed_B,
SUM(A.energy_consumed_C::numeric) AS total_energy_consumed_C
FROM public."power-clamp-energy-consumed-daily" AS A
JOIN public.device AS B
ON A.device_uuid::TEXT = B."uuid"::TEXT
JOIN params P ON TRUE
WHERE B."uuid"::TEXT = ANY(P.device_ids)
AND (P.month IS NULL OR date_trunc('month', A.date) = P.month)
GROUP BY A.date
ORDER BY A.date;
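-- Example bindings (hypothetical): $1 = '07-2025' and $2 = 'uuid-a,uuid-b' yield one row per day
-- of July 2025 with readings summed across the listed clamps; an empty $1 disables the month
-- filter and returns every recorded day.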

View File

@ -1,8 +1,8 @@
WITH params AS (
SELECT
TO_DATE(NULLIF($2, ''), 'DD-MM-YYYY') AS start_date,
TO_DATE(NULLIF($3, ''), 'DD-MM-YYYY') AS end_date,
string_to_array(NULLIF($4, ''), ',') AS device_ids
TO_DATE(NULLIF($1, ''), 'DD-MM-YYYY') AS start_date,
TO_DATE(NULLIF($2, ''), 'DD-MM-YYYY') AS end_date,
string_to_array(NULLIF($3, ''), ',') AS device_ids
)
SELECT TO_CHAR(A.date, 'MM-YYYY') AS month,

View File

@ -0,0 +1,15 @@
WITH params AS (
SELECT
$1::uuid AS space_id,
TO_DATE(NULLIF($2, ''), 'YYYY') AS event_year
)
SELECT psdsd.*
FROM public."presence-sensor-daily-space-detection" psdsd
JOIN params P ON true
WHERE psdsd.space_uuid = P.space_id
AND (
P.event_year IS NULL
OR TO_CHAR(psdsd.event_date, 'YYYY') = TO_CHAR(P.event_year, 'YYYY')
)
ORDER BY space_uuid, event_date

View File

@ -0,0 +1,100 @@
WITH device_logs AS (
SELECT
device.uuid AS device_id,
device.space_device_uuid AS space_id,
"device-status-log".event_time::timestamp AS event_time,
"device-status-log".value,
LAG("device-status-log".value)
OVER (PARTITION BY device.uuid ORDER BY "device-status-log".event_time) AS prev_value
FROM device
LEFT JOIN "device-status-log"
ON device.uuid = "device-status-log".device_id
LEFT JOIN product
ON product.uuid = device.product_device_uuid
WHERE product.cat_name = 'hps'
AND "device-status-log".code = 'presence_state'
),
-- 1. All 'none' → presence or motion
presence_transitions AS (
SELECT
space_id,
event_time,
event_time::date AS event_date,
value
FROM device_logs
WHERE (value = 'motion' OR value = 'presence') AND prev_value = 'none'
),
-- 2. Cluster events per space_id within 30s
clustered_events AS (
SELECT
space_id,
event_time,
event_date,
value,
SUM(new_cluster_flag) OVER (PARTITION BY space_id ORDER BY event_time) AS cluster_id
FROM (
SELECT *,
CASE
WHEN event_time - LAG(event_time) OVER (PARTITION BY space_id ORDER BY event_time) > INTERVAL '30 seconds'
THEN 1 ELSE 0
END AS new_cluster_flag
FROM presence_transitions
) marked
),
-- 3. Determine dominant type (motion vs presence) per cluster
cluster_type AS (
SELECT
space_id,
event_date,
cluster_id,
COUNT(*) FILTER (WHERE value = 'motion') AS motion_count,
COUNT(*) FILTER (WHERE value = 'presence') AS presence_count,
CASE
WHEN COUNT(*) FILTER (WHERE value = 'motion') > COUNT(*) FILTER (WHERE value = 'presence') THEN 'motion'
ELSE 'presence'
END AS dominant_type
FROM clustered_events
GROUP BY space_id, event_date, cluster_id
),
-- 4. Count clusters by dominant type
summary AS (
SELECT
space_id,
event_date,
COUNT(*) FILTER (WHERE dominant_type = 'motion') AS count_motion_detected,
COUNT(*) FILTER (WHERE dominant_type = 'presence') AS count_presence_detected,
COUNT(*) AS count_total_presence_detected
FROM cluster_type
GROUP BY space_id, event_date
)
-- 5. Output
, final_table AS (
SELECT *
FROM summary
ORDER BY space_id, event_date)
INSERT INTO public."presence-sensor-daily-space-detection" (
space_uuid,
event_date,
count_motion_detected,
count_presence_detected,
count_total_presence_detected
)
SELECT
space_id,
event_date,
count_motion_detected,
count_presence_detected,
count_total_presence_detected
FROM final_table
ON CONFLICT (space_uuid, event_date) DO UPDATE
SET
count_motion_detected = EXCLUDED.count_motion_detected,
count_presence_detected = EXCLUDED.count_presence_detected,
count_total_presence_detected = EXCLUDED.count_total_presence_detected;
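-- A minimal, standalone sketch (hypothetical literal data) of the 30-second clustering step used
-- above, handy for checking how cluster ids are assigned before running the full pipeline:
WITH sample(space_id, event_time) AS (
VALUES (1, TIMESTAMP '2025-07-01 10:00:00'),
(1, TIMESTAMP '2025-07-01 10:00:12'),
(1, TIMESTAMP '2025-07-01 10:00:20'),
(1, TIMESTAMP '2025-07-01 10:05:00')
),
marked AS (
SELECT space_id,
event_time,
CASE
WHEN event_time - LAG(event_time) OVER (PARTITION BY space_id ORDER BY event_time) > INTERVAL '30 seconds'
THEN 1 ELSE 0
END AS new_cluster_flag
FROM sample
)
SELECT space_id,
event_time,
SUM(new_cluster_flag) OVER (PARTITION BY space_id ORDER BY event_time) AS cluster_id
FROM marked;
-- The first three rows fall within 30 s of each other and share cluster_id 0; the 10:05:00 row
-- starts cluster_id 1, so the day would count two detection events for that space.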

View File

@ -0,0 +1,111 @@
WITH params AS (
SELECT
TO_DATE(NULLIF($1, ''), 'YYYY-MM-DD') AS event_date
),
device_logs AS (
SELECT
device.uuid AS device_id,
device.space_device_uuid AS space_id,
"device-status-log".event_time::timestamp AS event_time,
"device-status-log".value,
LAG("device-status-log".value)
OVER (PARTITION BY device.uuid ORDER BY "device-status-log".event_time) AS prev_value
FROM device
LEFT JOIN "device-status-log"
ON device.uuid = "device-status-log".device_id
LEFT JOIN product
ON product.uuid = device.product_device_uuid
WHERE product.cat_name = 'hps'
AND "device-status-log".code = 'presence_state'
),
-- 1. All 'none' → presence or motion
presence_transitions AS (
SELECT
space_id,
event_time,
event_time::date AS event_date,
value
FROM device_logs
WHERE (value = 'motion' OR value = 'presence') AND prev_value = 'none'
),
-- 2. Cluster events per space_id within 30s
clustered_events AS (
SELECT
space_id,
event_time,
event_date,
value,
SUM(new_cluster_flag) OVER (PARTITION BY space_id ORDER BY event_time) AS cluster_id
FROM (
SELECT *,
CASE
WHEN event_time - LAG(event_time) OVER (PARTITION BY space_id ORDER BY event_time) > INTERVAL '30 seconds'
THEN 1 ELSE 0
END AS new_cluster_flag
FROM presence_transitions
) marked
),
-- 3. Determine dominant type (motion vs presence) per cluster
cluster_type AS (
SELECT
space_id,
event_date,
cluster_id,
COUNT(*) FILTER (WHERE value = 'motion') AS motion_count,
COUNT(*) FILTER (WHERE value = 'presence') AS presence_count,
CASE
WHEN COUNT(*) FILTER (WHERE value = 'motion') > COUNT(*) FILTER (WHERE value = 'presence') THEN 'motion'
ELSE 'presence'
END AS dominant_type
FROM clustered_events
GROUP BY space_id, event_date, cluster_id
),
-- 4. Count clusters by dominant type
summary AS (
SELECT
space_id,
event_date,
COUNT(*) FILTER (WHERE dominant_type = 'motion') AS count_motion_detected,
COUNT(*) FILTER (WHERE dominant_type = 'presence') AS count_presence_detected,
COUNT(*) AS count_total_presence_detected
FROM cluster_type
GROUP BY space_id, event_date
)
-- 5. Output
, final_table AS (
SELECT summary.space_id,
summary.event_date,
count_motion_detected,
count_presence_detected,
count_total_presence_detected
FROM summary
JOIN params P ON true
WHERE (P.event_date IS NULL OR summary.event_date::date = P.event_date)
ORDER BY space_id, event_date)
INSERT INTO public."presence-sensor-daily-space-detection" (
space_uuid,
event_date,
count_motion_detected,
count_presence_detected,
count_total_presence_detected
)
SELECT
space_id,
event_date,
count_motion_detected,
count_presence_detected,
count_total_presence_detected
FROM final_table
ON CONFLICT (space_uuid, event_date) DO UPDATE
SET
count_motion_detected = EXCLUDED.count_motion_detected,
count_presence_detected = EXCLUDED.count_presence_detected,
count_total_presence_detected = EXCLUDED.count_total_presence_detected;

View File

@ -0,0 +1,362 @@
-- Function to calculate AQI
CREATE OR REPLACE FUNCTION calculate_aqi(p_pollutant TEXT, concentration NUMERIC)
RETURNS NUMERIC AS $$
DECLARE
c_low NUMERIC;
c_high NUMERIC;
i_low INT;
i_high INT;
BEGIN
SELECT v.c_low, v.c_high, v.i_low, v.i_high
INTO c_low, c_high, i_low, i_high
FROM (
VALUES
-- PM2.5
('pm25', 0.0, 12.0, 0, 50),
('pm25', 12.1, 35.4, 51, 100),
('pm25', 35.5, 55.4, 101, 150),
('pm25', 55.5, 150.4, 151, 200),
('pm25', 150.5, 250.4, 201, 300),
('pm25', 250.5, 500.4, 301, 500),
-- PM10
('pm10', 0, 54, 0, 50),
('pm10', 55, 154, 51, 100),
('pm10', 155, 254, 101, 150),
('pm10', 255, 354, 151, 200),
-- VOC
('voc', 0, 200, 0, 50),
('voc', 201, 400, 51, 100),
('voc', 401, 600, 101, 150),
('voc', 601, 1000, 151, 200),
-- CH2O
('ch2o', 0, 2, 0, 50),
('ch2o', 2.1, 4, 51, 100),
('ch2o', 4.1, 6, 101, 150),
-- CO2
('co2', 350, 1000, 0, 50),
('co2', 1001, 1250, 51, 100),
('co2', 1251, 1500, 101, 150),
('co2', 1501, 2000, 151, 200)
) AS v(pollutant, c_low, c_high, i_low, i_high)
WHERE v.pollutant = LOWER(p_pollutant)
AND concentration BETWEEN v.c_low AND v.c_high
LIMIT 1;
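-- If no breakpoint row matches (the concentration falls outside every range for this pollutant),
-- the variables remain NULL and the RETURN expression below yields NULL.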
RETURN ROUND(((i_high - i_low) * (concentration - c_low) / (c_high - c_low)) + i_low);
END;
$$ LANGUAGE plpgsql;
-- Function to classify AQI
CREATE OR REPLACE FUNCTION classify_aqi(aqi NUMERIC)
RETURNS TEXT AS $$
BEGIN
RETURN CASE
WHEN aqi BETWEEN 0 AND 50 THEN 'Good'
WHEN aqi BETWEEN 51 AND 100 THEN 'Moderate'
WHEN aqi BETWEEN 101 AND 150 THEN 'Unhealthy for Sensitive Groups'
WHEN aqi BETWEEN 151 AND 200 THEN 'Unhealthy'
WHEN aqi BETWEEN 201 AND 300 THEN 'Very Unhealthy'
WHEN aqi >= 301 THEN 'Hazardous'
ELSE NULL
END;
END;
$$ LANGUAGE plpgsql;
-- Function to convert AQI level string to number
CREATE OR REPLACE FUNCTION level_to_numeric(level_text TEXT)
RETURNS NUMERIC AS $$
BEGIN
RETURN CAST(regexp_replace(level_text, '[^0-9]', '', 'g') AS NUMERIC);
EXCEPTION WHEN others THEN
RETURN NULL;
END;
$$ LANGUAGE plpgsql;
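-- Quick manual sanity check for the helpers above (hypothetical readings); each statement can be
-- run on its own once the functions exist:
-- SELECT calculate_aqi('pm25', 20.0);  -- 68: 20.0 sits in the 12.1-35.4 band, interpolated onto 51-100
-- SELECT calculate_aqi('pm10', 80);    -- 63
-- SELECT classify_aqi(68);             -- 'Moderate'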
-- Query Pipeline Starts Here
WITH device_space AS (
SELECT
device.uuid AS device_id,
device.space_device_uuid AS space_id,
"device-status-log".event_time::timestamp AS event_time,
"device-status-log".code,
"device-status-log".value
FROM device
LEFT JOIN "device-status-log"
ON device.uuid = "device-status-log".device_id
LEFT JOIN product
ON product.uuid = device.product_device_uuid
WHERE product.cat_name = 'hjjcy'
),
average_pollutants AS (
SELECT
event_time::date AS event_date,
date_trunc('hour', event_time) AS event_hour,
device_id,
space_id,
-- PM1
MIN(CASE WHEN code = 'pm1' THEN value::numeric END) AS pm1_min,
AVG(CASE WHEN code = 'pm1' THEN value::numeric END) AS pm1_avg,
MAX(CASE WHEN code = 'pm1' THEN value::numeric END) AS pm1_max,
-- PM25
MIN(CASE WHEN code = 'pm25_value' THEN value::numeric END) AS pm25_min,
AVG(CASE WHEN code = 'pm25_value' THEN value::numeric END) AS pm25_avg,
MAX(CASE WHEN code = 'pm25_value' THEN value::numeric END) AS pm25_max,
-- PM10
MIN(CASE WHEN code = 'pm10' THEN value::numeric END) AS pm10_min,
AVG(CASE WHEN code = 'pm10' THEN value::numeric END) AS pm10_avg,
MAX(CASE WHEN code = 'pm10' THEN value::numeric END) AS pm10_max,
-- VOC
MIN(CASE WHEN code = 'voc_value' THEN value::numeric END) AS voc_min,
AVG(CASE WHEN code = 'voc_value' THEN value::numeric END) AS voc_avg,
MAX(CASE WHEN code = 'voc_value' THEN value::numeric END) AS voc_max,
-- CH2O
MIN(CASE WHEN code = 'ch2o_value' THEN value::numeric END) AS ch2o_min,
AVG(CASE WHEN code = 'ch2o_value' THEN value::numeric END) AS ch2o_avg,
MAX(CASE WHEN code = 'ch2o_value' THEN value::numeric END) AS ch2o_max,
-- CO2
MIN(CASE WHEN code = 'co2_value' THEN value::numeric END) AS co2_min,
AVG(CASE WHEN code = 'co2_value' THEN value::numeric END) AS co2_avg,
MAX(CASE WHEN code = 'co2_value' THEN value::numeric END) AS co2_max
FROM device_space
GROUP BY device_id, space_id, event_hour, event_date
),
filled_pollutants AS (
SELECT
*,
-- AVG
COALESCE(pm25_avg, LAG(pm25_avg) OVER (PARTITION BY device_id ORDER BY event_hour)) AS pm25_avg_f,
COALESCE(pm10_avg, LAG(pm10_avg) OVER (PARTITION BY device_id ORDER BY event_hour)) AS pm10_avg_f,
COALESCE(voc_avg, LAG(voc_avg) OVER (PARTITION BY device_id ORDER BY event_hour)) AS voc_avg_f,
COALESCE(co2_avg, LAG(co2_avg) OVER (PARTITION BY device_id ORDER BY event_hour)) AS co2_avg_f,
COALESCE(ch2o_avg, LAG(ch2o_avg) OVER (PARTITION BY device_id ORDER BY event_hour)) AS ch2o_avg_f,
-- MIN
COALESCE(pm25_min, LAG(pm25_min) OVER (PARTITION BY device_id ORDER BY event_hour)) AS pm25_min_f,
COALESCE(pm10_min, LAG(pm10_min) OVER (PARTITION BY device_id ORDER BY event_hour)) AS pm10_min_f,
COALESCE(voc_min, LAG(voc_min) OVER (PARTITION BY device_id ORDER BY event_hour)) AS voc_min_f,
COALESCE(co2_min, LAG(co2_min) OVER (PARTITION BY device_id ORDER BY event_hour)) AS co2_min_f,
COALESCE(ch2o_min, LAG(ch2o_min) OVER (PARTITION BY device_id ORDER BY event_hour)) AS ch2o_min_f,
-- MAX
COALESCE(pm25_max, LAG(pm25_max) OVER (PARTITION BY device_id ORDER BY event_hour)) AS pm25_max_f,
COALESCE(pm10_max, LAG(pm10_max) OVER (PARTITION BY device_id ORDER BY event_hour)) AS pm10_max_f,
COALESCE(voc_max, LAG(voc_max) OVER (PARTITION BY device_id ORDER BY event_hour)) AS voc_max_f,
COALESCE(co2_max, LAG(co2_max) OVER (PARTITION BY device_id ORDER BY event_hour)) AS co2_max_f,
COALESCE(ch2o_max, LAG(ch2o_max) OVER (PARTITION BY device_id ORDER BY event_hour)) AS ch2o_max_f
FROM average_pollutants
),
hourly_results AS (
SELECT
device_id,
space_id,
event_date,
event_hour,
pm1_min, pm1_avg, pm1_max,
pm25_min_f, pm25_avg_f, pm25_max_f,
pm10_min_f, pm10_avg_f, pm10_max_f,
voc_min_f, voc_avg_f, voc_max_f,
co2_min_f, co2_avg_f, co2_max_f,
ch2o_min_f, ch2o_avg_f, ch2o_max_f,
GREATEST(
calculate_aqi('pm25', pm25_min_f),
calculate_aqi('pm10', pm10_min_f)
) AS hourly_min_aqi,
GREATEST(
calculate_aqi('pm25', pm25_avg_f),
calculate_aqi('pm10', pm10_avg_f)
) AS hourly_avg_aqi,
GREATEST(
calculate_aqi('pm25', pm25_max_f),
calculate_aqi('pm10', pm10_max_f)
) AS hourly_max_aqi,
classify_aqi(GREATEST(
calculate_aqi('pm25', pm25_avg_f),
calculate_aqi('pm10', pm10_avg_f)
)) AS aqi_category,
classify_aqi(calculate_aqi('pm25',pm25_avg_f)) as pm25_category,
classify_aqi(calculate_aqi('pm10',pm10_avg_f)) as pm10_category,
classify_aqi(calculate_aqi('voc',voc_avg_f)) as voc_category,
classify_aqi(calculate_aqi('co2',co2_avg_f)) as co2_category,
classify_aqi(calculate_aqi('ch2o',ch2o_avg_f)) as ch2o_category
FROM filled_pollutants
),
daily_category_counts AS (
SELECT device_id, space_id, event_date, aqi_category AS category, 'aqi' AS pollutant, COUNT(*) AS category_count
FROM hourly_results
GROUP BY device_id, space_id, event_date, aqi_category
UNION ALL
SELECT device_id, space_id, event_date, pm25_category AS category, 'pm25' AS pollutant, COUNT(*) AS category_count
FROM hourly_results
GROUP BY device_id, space_id, event_date, pm25_category
UNION ALL
SELECT device_id, space_id, event_date, pm10_category AS category, 'pm10' AS pollutant, COUNT(*) AS category_count
FROM hourly_results
GROUP BY device_id, space_id, event_date, pm10_category
UNION ALL
SELECT device_id, space_id, event_date, voc_category AS category, 'voc' AS pollutant, COUNT(*) AS category_count
FROM hourly_results
GROUP BY device_id, space_id, event_date, voc_category
UNION ALL
SELECT device_id, space_id, event_date, co2_category AS category, 'co2' AS pollutant, COUNT(*) AS category_count
FROM hourly_results
GROUP BY device_id, space_id, event_date, co2_category
UNION ALL
SELECT device_id, space_id, event_date, ch2o_category AS category, 'ch2o' AS pollutant, COUNT(*) AS category_count
FROM hourly_results
GROUP BY device_id, space_id, event_date, ch2o_category
),
daily_totals AS (
SELECT
device_id,
space_id,
event_date,
SUM(category_count) AS total_count
FROM daily_category_counts
WHERE pollutant = 'aqi'
GROUP BY device_id, space_id, event_date
),
-- Pivot Categories into Columns
daily_percentages AS (
SELECT
dt.device_id,
dt.space_id,
dt.event_date,
-- AQI CATEGORIES
ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Good' and dcc.pollutant = 'aqi' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS good_aqi_percentage,
ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Moderate' and dcc.pollutant = 'aqi' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS moderate_aqi_percentage,
ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Unhealthy for Sensitive Groups' and dcc.pollutant = 'aqi' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS unhealthy_sensitive_aqi_percentage,
ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Unhealthy' and dcc.pollutant = 'aqi' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS unhealthy_aqi_percentage,
ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Very Unhealthy' and dcc.pollutant = 'aqi' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS very_unhealthy_aqi_percentage,
ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Hazardous' and dcc.pollutant = 'aqi' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS hazardous_aqi_percentage,
-- PM25 CATEGORIES
ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Good' and dcc.pollutant = 'pm25' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS good_pm25_percentage,
ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Moderate' and dcc.pollutant = 'pm25' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS moderate_pm25_percentage,
ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Unhealthy for Sensitive Groups' and dcc.pollutant = 'pm25' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS unhealthy_sensitive_pm25_percentage,
ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Unhealthy' and dcc.pollutant = 'pm25' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS unhealthy_pm25_percentage,
ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Very Unhealthy' and dcc.pollutant = 'pm25' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS very_unhealthy_pm25_percentage,
ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Hazardous' and dcc.pollutant = 'pm25' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS hazardous_pm25_percentage,
-- PM10 CATEGORIES
ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Good' and dcc.pollutant = 'pm10' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS good_pm10_percentage,
ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Moderate' and dcc.pollutant = 'pm10' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS moderate_pm10_percentage,
ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Unhealthy for Sensitive Groups' and dcc.pollutant = 'pm10' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS unhealthy_sensitive_pm10_percentage,
ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Unhealthy' and dcc.pollutant = 'pm10' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS unhealthy_pm10_percentage,
ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Very Unhealthy' and dcc.pollutant = 'pm10' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS very_unhealthy_pm10_percentage,
ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Hazardous' and dcc.pollutant = 'pm10' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS hazardous_pm10_percentage,
-- VOC CATEGORIES
ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Good' and dcc.pollutant = 'voc' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS good_voc_percentage,
ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Moderate' and dcc.pollutant = 'voc' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS moderate_voc_percentage,
ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Unhealthy for Sensitive Groups' and dcc.pollutant = 'voc' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS unhealthy_sensitive_voc_percentage,
ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Unhealthy' and dcc.pollutant = 'voc' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS unhealthy_voc_percentage,
ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Very Unhealthy' and dcc.pollutant = 'voc' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS very_unhealthy_voc_percentage,
ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Hazardous' and dcc.pollutant = 'voc' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS hazardous_voc_percentage,
-- CO2 CATEGORIES
ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Good' and dcc.pollutant = 'co2' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS good_co2_percentage,
ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Moderate' and dcc.pollutant = 'co2' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS moderate_co2_percentage,
ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Unhealthy for Sensitive Groups' and dcc.pollutant = 'co2' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS unhealthy_sensitive_co2_percentage,
ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Unhealthy' and dcc.pollutant = 'co2' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS unhealthy_co2_percentage,
ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Very Unhealthy' and dcc.pollutant = 'co2' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS very_unhealthy_co2_percentage,
ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Hazardous' and dcc.pollutant = 'co2' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS hazardous_co2_percentage,
-- CH20 CATEGORIES
ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Good' and dcc.pollutant = 'ch2o' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS good_ch2o_percentage,
ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Moderate' and dcc.pollutant = 'ch2o' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS moderate_ch2o_percentage,
ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Unhealthy for Sensitive Groups' and dcc.pollutant = 'ch2o' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS unhealthy_sensitive_ch2o_percentage,
ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Unhealthy' and dcc.pollutant = 'ch2o' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS unhealthy_ch2o_percentage,
ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Very Unhealthy' and dcc.pollutant = 'ch2o' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS very_unhealthy_ch2o_percentage,
ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Hazardous' and dcc.pollutant = 'ch2o' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS hazardous_ch2o_percentage
FROM daily_totals dt
LEFT JOIN daily_category_counts dcc
ON dt.device_id = dcc.device_id AND dt.event_date = dcc.event_date
GROUP BY dt.device_id, dt.space_id, dt.event_date, dt.total_count
),
daily_averages AS (
SELECT
device_id,
space_id,
event_date,
-- AQI
ROUND(AVG(hourly_min_aqi)::numeric, 2) AS daily_min_aqi,
ROUND(AVG(hourly_avg_aqi)::numeric, 2) AS daily_avg_aqi,
ROUND(AVG(hourly_max_aqi)::numeric, 2) AS daily_max_aqi,
-- PM25
ROUND(AVG(pm25_min_f)::numeric, 2) AS daily_min_pm25,
ROUND(AVG(pm25_avg_f)::numeric, 2) AS daily_avg_pm25,
ROUND(AVG(pm25_max_f)::numeric, 2) AS daily_max_pm25,
-- PM10
ROUND(AVG(pm10_min_f)::numeric, 2) AS daily_min_pm10,
ROUND(AVG(pm10_avg_f)::numeric, 2) AS daily_avg_pm10,
ROUND(AVG(pm10_max_f)::numeric, 2) AS daily_max_pm10,
-- VOC
ROUND(AVG(voc_min_f)::numeric, 2) AS daily_min_voc,
ROUND(AVG(voc_avg_f)::numeric, 2) AS daily_avg_voc,
ROUND(AVG(voc_max_f)::numeric, 2) AS daily_max_voc,
-- CO2
ROUND(AVG(co2_min_f)::numeric, 2) AS daily_min_co2,
ROUND(AVG(co2_avg_f)::numeric, 2) AS daily_avg_co2,
ROUND(AVG(co2_max_f)::numeric, 2) AS daily_max_co2,
-- CH2O
ROUND(AVG(ch2o_min_f)::numeric, 2) AS daily_min_ch2o,
ROUND(AVG(ch2o_avg_f)::numeric, 2) AS daily_avg_ch2o,
ROUND(AVG(ch2o_max_f)::numeric, 2) AS daily_max_ch2o
FROM hourly_results
GROUP BY device_id, space_id, event_date
)
SELECT
p.device_id,
p.space_id,
p.event_date,
p.good_aqi_percentage, p.moderate_aqi_percentage, p.unhealthy_sensitive_aqi_percentage, p.unhealthy_aqi_percentage, p.very_unhealthy_aqi_percentage, p.hazardous_aqi_percentage,
a.daily_avg_aqi,a.daily_max_aqi, a.daily_min_aqi,
p.good_pm25_percentage, p.moderate_pm25_percentage, p.unhealthy_sensitive_pm25_percentage, p.unhealthy_pm25_percentage, p.very_unhealthy_pm25_percentage, p.hazardous_pm25_percentage,
a.daily_avg_pm25,a.daily_max_pm25, a.daily_min_pm25,
p.good_pm10_percentage, p.moderate_pm10_percentage, p.unhealthy_sensitive_pm10_percentage, p.unhealthy_pm10_percentage, p.very_unhealthy_pm10_percentage, p.hazardous_pm10_percentage,
a.daily_avg_pm10, a.daily_max_pm10, a.daily_min_pm10,
p.good_voc_percentage, p.moderate_voc_percentage, p.unhealthy_sensitive_voc_percentage, p.unhealthy_voc_percentage, p.very_unhealthy_voc_percentage, p.hazardous_voc_percentage,
a.daily_avg_voc, a.daily_max_voc, a.daily_min_voc,
p.good_co2_percentage, p.moderate_co2_percentage, p.unhealthy_sensitive_co2_percentage, p.unhealthy_co2_percentage, p.very_unhealthy_co2_percentage, p.hazardous_co2_percentage,
a.daily_avg_co2,a.daily_max_co2, a.daily_min_co2,
p.good_ch2o_percentage, p.moderate_ch2o_percentage, p.unhealthy_sensitive_ch2o_percentage, p.unhealthy_ch2o_percentage, p.very_unhealthy_ch2o_percentage, p.hazardous_ch2o_percentage,
a.daily_avg_ch2o,a.daily_max_ch2o, a.daily_min_ch2o
FROM daily_percentages p
LEFT JOIN daily_averages a
ON p.device_id = a.device_id AND p.event_date = a.event_date
ORDER BY p.space_id, p.event_date;
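-- Example (hypothetical): a device whose 24 hourly aqi_category values split into 18 'Good' and
-- 6 'Moderate' rows gets good_aqi_percentage = 75.00 and moderate_aqi_percentage = 25.00 for that
-- event_date; daily_avg_aqi is simply the average of its hourly_avg_aqi values.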

View File

@ -0,0 +1,275 @@
-- Query Pipeline Starts Here
WITH device_space AS (
SELECT
device.uuid AS device_id,
device.space_device_uuid AS space_id,
"device-status-log".event_time::timestamp AS event_time,
"device-status-log".code,
"device-status-log".value
FROM device
LEFT JOIN "device-status-log"
ON device.uuid = "device-status-log".device_id
LEFT JOIN product
ON product.uuid = device.product_device_uuid
WHERE product.cat_name = 'hjjcy'
),
average_pollutants AS (
SELECT
event_time::date AS event_date,
date_trunc('hour', event_time) AS event_hour,
space_id,
-- PM1
MIN(CASE WHEN code = 'pm1' THEN value::numeric END) AS pm1_min,
AVG(CASE WHEN code = 'pm1' THEN value::numeric END) AS pm1_avg,
MAX(CASE WHEN code = 'pm1' THEN value::numeric END) AS pm1_max,
-- PM25
MIN(CASE WHEN code = 'pm25_value' THEN value::numeric END) AS pm25_min,
AVG(CASE WHEN code = 'pm25_value' THEN value::numeric END) AS pm25_avg,
MAX(CASE WHEN code = 'pm25_value' THEN value::numeric END) AS pm25_max,
-- PM10
MIN(CASE WHEN code = 'pm10' THEN value::numeric END) AS pm10_min,
AVG(CASE WHEN code = 'pm10' THEN value::numeric END) AS pm10_avg,
MAX(CASE WHEN code = 'pm10' THEN value::numeric END) AS pm10_max,
-- VOC
MIN(CASE WHEN code = 'voc_value' THEN value::numeric END) AS voc_min,
AVG(CASE WHEN code = 'voc_value' THEN value::numeric END) AS voc_avg,
MAX(CASE WHEN code = 'voc_value' THEN value::numeric END) AS voc_max,
-- CH2O
MIN(CASE WHEN code = 'ch2o_value' THEN value::numeric END) AS ch2o_min,
AVG(CASE WHEN code = 'ch2o_value' THEN value::numeric END) AS ch2o_avg,
MAX(CASE WHEN code = 'ch2o_value' THEN value::numeric END) AS ch2o_max,
-- CO2
MIN(CASE WHEN code = 'co2_value' THEN value::numeric END) AS co2_min,
AVG(CASE WHEN code = 'co2_value' THEN value::numeric END) AS co2_avg,
MAX(CASE WHEN code = 'co2_value' THEN value::numeric END) AS co2_max
FROM device_space
GROUP BY space_id, event_hour, event_date
),
filled_pollutants AS (
SELECT
*,
-- AVG
COALESCE(pm25_avg, LAG(pm25_avg) OVER (PARTITION BY space_id ORDER BY event_hour)) AS pm25_avg_f,
COALESCE(pm10_avg, LAG(pm10_avg) OVER (PARTITION BY space_id ORDER BY event_hour)) AS pm10_avg_f,
COALESCE(voc_avg, LAG(voc_avg) OVER (PARTITION BY space_id ORDER BY event_hour)) AS voc_avg_f,
COALESCE(co2_avg, LAG(co2_avg) OVER (PARTITION BY space_id ORDER BY event_hour)) AS co2_avg_f,
COALESCE(ch2o_avg, LAG(ch2o_avg) OVER (PARTITION BY space_id ORDER BY event_hour)) AS ch2o_avg_f,
-- MIN
COALESCE(pm25_min, LAG(pm25_min) OVER (PARTITION BY space_id ORDER BY event_hour)) AS pm25_min_f,
COALESCE(pm10_min, LAG(pm10_min) OVER (PARTITION BY space_id ORDER BY event_hour)) AS pm10_min_f,
COALESCE(voc_min, LAG(voc_min) OVER (PARTITION BY space_id ORDER BY event_hour)) AS voc_min_f,
COALESCE(co2_min, LAG(co2_min) OVER (PARTITION BY space_id ORDER BY event_hour)) AS co2_min_f,
COALESCE(ch2o_min, LAG(ch2o_min) OVER (PARTITION BY space_id ORDER BY event_hour)) AS ch2o_min_f,
-- MAX
COALESCE(pm25_max, LAG(pm25_max) OVER (PARTITION BY space_id ORDER BY event_hour)) AS pm25_max_f,
COALESCE(pm10_max, LAG(pm10_max) OVER (PARTITION BY space_id ORDER BY event_hour)) AS pm10_max_f,
COALESCE(voc_max, LAG(voc_max) OVER (PARTITION BY space_id ORDER BY event_hour)) AS voc_max_f,
COALESCE(co2_max, LAG(co2_max) OVER (PARTITION BY space_id ORDER BY event_hour)) AS co2_max_f,
COALESCE(ch2o_max, LAG(ch2o_max) OVER (PARTITION BY space_id ORDER BY event_hour)) AS ch2o_max_f
FROM average_pollutants
),
hourly_results AS (
SELECT
space_id,
event_date,
event_hour,
pm1_min, pm1_avg, pm1_max,
pm25_min_f, pm25_avg_f, pm25_max_f,
pm10_min_f, pm10_avg_f, pm10_max_f,
voc_min_f, voc_avg_f, voc_max_f,
co2_min_f, co2_avg_f, co2_max_f,
ch2o_min_f, ch2o_avg_f, ch2o_max_f,
GREATEST(
calculate_aqi('pm25', pm25_min_f),
calculate_aqi('pm10', pm10_min_f)
) AS hourly_min_aqi,
GREATEST(
calculate_aqi('pm25', pm25_avg_f),
calculate_aqi('pm10', pm10_avg_f)
) AS hourly_avg_aqi,
GREATEST(
calculate_aqi('pm25', pm25_max_f),
calculate_aqi('pm10', pm10_max_f)
) AS hourly_max_aqi,
classify_aqi(GREATEST(
calculate_aqi('pm25', pm25_avg_f),
calculate_aqi('pm10', pm10_avg_f)
)) AS aqi_category,
classify_aqi(calculate_aqi('pm25',pm25_avg_f)) as pm25_category,
classify_aqi(calculate_aqi('pm10',pm10_avg_f)) as pm10_category,
classify_aqi(calculate_aqi('voc',voc_avg_f)) as voc_category,
classify_aqi(calculate_aqi('co2',co2_avg_f)) as co2_category,
classify_aqi(calculate_aqi('ch2o',ch2o_avg_f)) as ch2o_category
FROM filled_pollutants
),
daily_category_counts AS (
SELECT space_id, event_date, aqi_category AS category, 'aqi' AS pollutant, COUNT(*) AS category_count
FROM hourly_results
GROUP BY space_id, event_date, aqi_category
UNION ALL
SELECT space_id, event_date, pm25_category AS category, 'pm25' AS pollutant, COUNT(*) AS category_count
FROM hourly_results
GROUP BY space_id, event_date, pm25_category
UNION ALL
SELECT space_id, event_date, pm10_category AS category, 'pm10' AS pollutant, COUNT(*) AS category_count
FROM hourly_results
GROUP BY space_id, event_date, pm10_category
UNION ALL
SELECT space_id, event_date, voc_category AS category, 'voc' AS pollutant, COUNT(*) AS category_count
FROM hourly_results
GROUP BY space_id, event_date, voc_category
UNION ALL
SELECT space_id, event_date, co2_category AS category, 'co2' AS pollutant, COUNT(*) AS category_count
FROM hourly_results
GROUP BY space_id, event_date, co2_category
UNION ALL
SELECT space_id, event_date, ch2o_category AS category, 'ch2o' AS pollutant, COUNT(*) AS category_count
FROM hourly_results
GROUP BY space_id, event_date, ch2o_category
),
daily_totals AS (
SELECT
space_id,
event_date,
SUM(category_count) AS total_count
FROM daily_category_counts
WHERE pollutant = 'aqi'
GROUP BY space_id, event_date
),
-- Pivot Categories into Columns
daily_percentages AS (
SELECT
dt.space_id,
dt.event_date,
-- AQI CATEGORIES
ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Good' and dcc.pollutant = 'aqi' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS good_aqi_percentage,
ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Moderate' and dcc.pollutant = 'aqi' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS moderate_aqi_percentage,
ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Unhealthy for Sensitive Groups' and dcc.pollutant = 'aqi' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS unhealthy_sensitive_aqi_percentage,
ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Unhealthy' and dcc.pollutant = 'aqi' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS unhealthy_aqi_percentage,
ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Very Unhealthy' and dcc.pollutant = 'aqi' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS very_unhealthy_aqi_percentage,
ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Hazardous' and dcc.pollutant = 'aqi' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS hazardous_aqi_percentage,
-- PM25 CATEGORIES
ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Good' and dcc.pollutant = 'pm25' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS good_pm25_percentage,
ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Moderate' and dcc.pollutant = 'pm25' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS moderate_pm25_percentage,
ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Unhealthy for Sensitive Groups' and dcc.pollutant = 'pm25' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS unhealthy_sensitive_pm25_percentage,
ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Unhealthy' and dcc.pollutant = 'pm25' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS unhealthy_pm25_percentage,
ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Very Unhealthy' and dcc.pollutant = 'pm25' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS very_unhealthy_pm25_percentage,
ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Hazardous' and dcc.pollutant = 'pm25' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS hazardous_pm25_percentage,
-- PM10 CATEGORIES
ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Good' and dcc.pollutant = 'pm10' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS good_pm10_percentage,
ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Moderate' and dcc.pollutant = 'pm10' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS moderate_pm10_percentage,
ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Unhealthy for Sensitive Groups' and dcc.pollutant = 'pm10' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS unhealthy_sensitive_pm10_percentage,
ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Unhealthy' and dcc.pollutant = 'pm10' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS unhealthy_pm10_percentage,
ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Very Unhealthy' and dcc.pollutant = 'pm10' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS very_unhealthy_pm10_percentage,
ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Hazardous' and dcc.pollutant = 'pm10' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS hazardous_pm10_percentage,
-- VOC CATEGORIES
ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Good' and dcc.pollutant = 'voc' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS good_voc_percentage,
ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Moderate' and dcc.pollutant = 'voc' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS moderate_voc_percentage,
ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Unhealthy for Sensitive Groups' and dcc.pollutant = 'voc' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS unhealthy_sensitive_voc_percentage,
ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Unhealthy' and dcc.pollutant = 'voc' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS unhealthy_voc_percentage,
ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Very Unhealthy' and dcc.pollutant = 'voc' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS very_unhealthy_voc_percentage,
ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Hazardous' and dcc.pollutant = 'voc' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS hazardous_voc_percentage,
-- CO2 CATEGORIES
ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Good' and dcc.pollutant = 'co2' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS good_co2_percentage,
ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Moderate' and dcc.pollutant = 'co2' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS moderate_co2_percentage,
ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Unhealthy for Sensitive Groups' and dcc.pollutant = 'co2' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS unhealthy_sensitive_co2_percentage,
ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Unhealthy' and dcc.pollutant = 'co2' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS unhealthy_co2_percentage,
ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Very Unhealthy' and dcc.pollutant = 'co2' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS very_unhealthy_co2_percentage,
ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Hazardous' and dcc.pollutant = 'co2' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS hazardous_co2_percentage,
-- CH20 CATEGORIES
ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Good' and dcc.pollutant = 'ch2o' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS good_ch2o_percentage,
ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Moderate' and dcc.pollutant = 'ch2o' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS moderate_ch2o_percentage,
ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Unhealthy for Sensitive Groups' and dcc.pollutant = 'ch2o' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS unhealthy_sensitive_ch2o_percentage,
ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Unhealthy' and dcc.pollutant = 'ch2o' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS unhealthy_ch2o_percentage,
ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Very Unhealthy' and dcc.pollutant = 'ch2o' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS very_unhealthy_ch2o_percentage,
ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Hazardous' and dcc.pollutant = 'ch2o' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS hazardous_ch2o_percentage
FROM daily_totals dt
LEFT JOIN daily_category_counts dcc
ON dt.space_id = dcc.space_id AND dt.event_date = dcc.event_date
GROUP BY dt.space_id, dt.event_date, dt.total_count
),
daily_averages AS (
SELECT
space_id,
event_date,
-- AQI
ROUND(AVG(hourly_min_aqi)::numeric, 2) AS daily_min_aqi,
ROUND(AVG(hourly_avg_aqi)::numeric, 2) AS daily_avg_aqi,
ROUND(AVG(hourly_max_aqi)::numeric, 2) AS daily_max_aqi,
-- PM25
ROUND(AVG(pm25_min_f)::numeric, 2) AS daily_min_pm25,
ROUND(AVG(pm25_avg_f)::numeric, 2) AS daily_avg_pm25,
ROUND(AVG(pm25_max_f)::numeric, 2) AS daily_max_pm25,
-- PM10
ROUND(AVG(pm10_min_f)::numeric, 2) AS daily_min_pm10,
ROUND(AVG(pm10_avg_f)::numeric, 2) AS daily_avg_pm10,
ROUND(AVG(pm10_max_f)::numeric, 2) AS daily_max_pm10,
-- VOC
ROUND(AVG(voc_min_f)::numeric, 2) AS daily_min_voc,
ROUND(AVG(voc_avg_f)::numeric, 2) AS daily_avg_voc,
ROUND(AVG(voc_max_f)::numeric, 2) AS daily_max_voc,
-- CO2
ROUND(AVG(co2_min_f)::numeric, 2) AS daily_min_co2,
ROUND(AVG(co2_avg_f)::numeric, 2) AS daily_avg_co2,
ROUND(AVG(co2_max_f)::numeric, 2) AS daily_max_co2,
-- CH2O
ROUND(AVG(ch2o_min_f)::numeric, 2) AS daily_min_ch2o,
ROUND(AVG(ch2o_avg_f)::numeric, 2) AS daily_avg_ch2o,
ROUND(AVG(ch2o_max_f)::numeric, 2) AS daily_max_ch2o
FROM hourly_results
GROUP BY space_id, event_date
)
SELECT
p.space_id,
p.event_date,
p.good_aqi_percentage, p.moderate_aqi_percentage, p.unhealthy_sensitive_aqi_percentage, p.unhealthy_aqi_percentage, p.very_unhealthy_aqi_percentage, p.hazardous_aqi_percentage,
a.daily_avg_aqi,a.daily_max_aqi, a.daily_min_aqi,
p.good_pm25_percentage, p.moderate_pm25_percentage, p.unhealthy_sensitive_pm25_percentage, p.unhealthy_pm25_percentage, p.very_unhealthy_pm25_percentage, p.hazardous_pm25_percentage,
a.daily_avg_pm25,a.daily_max_pm25, a.daily_min_pm25,
p.good_pm10_percentage, p.moderate_pm10_percentage, p.unhealthy_sensitive_pm10_percentage, p.unhealthy_pm10_percentage, p.very_unhealthy_pm10_percentage, p.hazardous_pm10_percentage,
a.daily_avg_pm10, a.daily_max_pm10, a.daily_min_pm10,
p.good_voc_percentage, p.moderate_voc_percentage, p.unhealthy_sensitive_voc_percentage, p.unhealthy_voc_percentage, p.very_unhealthy_voc_percentage, p.hazardous_voc_percentage,
a.daily_avg_voc, a.daily_max_voc, a.daily_min_voc,
p.good_co2_percentage, p.moderate_co2_percentage, p.unhealthy_sensitive_co2_percentage, p.unhealthy_co2_percentage, p.very_unhealthy_co2_percentage, p.hazardous_co2_percentage,
a.daily_avg_co2,a.daily_max_co2, a.daily_min_co2,
p.good_ch2o_percentage, p.moderate_ch2o_percentage, p.unhealthy_sensitive_ch2o_percentage, p.unhealthy_ch2o_percentage, p.very_unhealthy_ch2o_percentage, p.hazardous_ch2o_percentage,
a.daily_avg_ch2o,a.daily_max_ch2o, a.daily_min_ch2o
FROM daily_percentages p
LEFT JOIN daily_averages a
ON p.space_id = a.space_id AND p.event_date = a.event_date
ORDER BY p.space_id, p.event_date;

View File

@ -1,91 +1,90 @@
-- Step 1: Get device presence events with previous timestamps
WITH start_date AS (
SELECT
d.uuid AS device_id,
d.space_device_uuid AS space_id,
l.value,
l.event_time::timestamp AS event_time,
LAG(l.event_time::timestamp) OVER (PARTITION BY d.uuid ORDER BY l.event_time) AS prev_timestamp
FROM device d
LEFT JOIN "device-status-log" l
ON d.uuid = l.device_id
LEFT JOIN product p
ON p.uuid = d.product_device_uuid
WHERE p.cat_name = 'hps'
AND l.code = 'presence_state'
WITH presence_logs AS (
SELECT
d.space_device_uuid AS space_id,
l.device_id,
l.event_time,
l.value,
LAG(l.event_time) OVER (PARTITION BY l.device_id ORDER BY l.event_time) AS prev_time,
LAG(l.value) OVER (PARTITION BY l.device_id ORDER BY l.event_time) AS prev_value
FROM device d
JOIN "device-status-log" l ON d.uuid = l.device_id
JOIN product p ON p.uuid = d.product_device_uuid
WHERE l.code = 'presence_state'
AND p.cat_name = 'hps'
),
-- Step 2: Identify periods when device reports "none"
device_none_periods AS (
SELECT
space_id,
device_id,
event_time AS empty_from,
LEAD(event_time) OVER (PARTITION BY device_id ORDER BY event_time) AS empty_until
FROM start_date
WHERE value = 'none'
-- Intervals when device was in 'presence' (between prev_time and event_time when value='none')
presence_intervals AS (
SELECT
space_id,
prev_time AS start_time,
event_time AS end_time
FROM presence_logs
WHERE value = 'none'
AND prev_value = 'presence'
AND prev_time IS NOT NULL
),
-- Step 3: Clip the "none" periods to the edges of each day
clipped_device_none_periods AS (
SELECT
space_id,
GREATEST(empty_from, DATE_TRUNC('day', empty_from)) AS clipped_from,
LEAST(empty_until, DATE_TRUNC('day', empty_until) + INTERVAL '1 day') AS clipped_until
FROM device_none_periods
WHERE empty_until IS NOT NULL
-- Split intervals across days
split_intervals AS (
SELECT
space_id,
generate_series(
date_trunc('day', start_time),
date_trunc('day', end_time),
interval '1 day'
)::date AS event_date,
GREATEST(start_time, date_trunc('day', start_time)) AS interval_start,
LEAST(end_time, date_trunc('day', end_time) + interval '1 day') AS interval_end
FROM presence_intervals
),
-- Step 4: Break multi-day periods into daily intervals
generated_daily_intervals AS (
SELECT
space_id,
gs::date AS day,
GREATEST(clipped_from, gs) AS interval_start,
LEAST(clipped_until, gs + INTERVAL '1 day') AS interval_end
FROM clipped_device_none_periods,
LATERAL generate_series(DATE_TRUNC('day', clipped_from), DATE_TRUNC('day', clipped_until), INTERVAL '1 day') AS gs
-- Mark and group overlapping intervals per space per day
ordered_intervals AS (
SELECT
space_id,
event_date,
interval_start,
interval_end,
LAG(interval_end) OVER (PARTITION BY space_id, event_date ORDER BY interval_start) AS prev_end
FROM split_intervals
),
-- Step 5: Merge overlapping or adjacent intervals per day
grouped_intervals AS (
SELECT *,
SUM(CASE
WHEN prev_end IS NULL OR interval_start > prev_end THEN 1
ELSE 0
END) OVER (PARTITION BY space_id, event_date ORDER BY interval_start) AS grp
FROM ordered_intervals
),
-- Merge overlapping intervals per group
merged_intervals AS (
SELECT
space_id,
day,
interval_start,
interval_end
FROM (
SELECT
space_id,
day,
interval_start,
interval_end,
LAG(interval_end) OVER (PARTITION BY space_id, day ORDER BY interval_start) AS prev_end
FROM generated_daily_intervals
) sub
WHERE prev_end IS NULL OR interval_start > prev_end
SELECT
space_id,
event_date,
MIN(interval_start) AS merged_start,
MAX(interval_end) AS merged_end
FROM grouped_intervals
GROUP BY space_id, event_date, grp
),
-- Step 6: Sum up total missing seconds (device reported "none") per day
missing_seconds_per_day AS (
SELECT
space_id,
day AS missing_date,
SUM(EXTRACT(EPOCH FROM (interval_end - interval_start))) AS total_missing_seconds
FROM merged_intervals
GROUP BY space_id, day
),
-- Step 7: Calculate total occupied time per day (86400 - missing)
occupied_seconds_per_day AS (
SELECT
space_id,
missing_date as date,
86400 - total_missing_seconds AS total_occupied_seconds
FROM missing_seconds_per_day
-- Sum durations of merged intervals
summed_intervals AS (
SELECT
space_id,
event_date,
SUM(EXTRACT(EPOCH FROM (merged_end - merged_start))) AS raw_occupied_seconds
FROM merged_intervals
GROUP BY space_id, event_date
)
-- Final Output
SELECT *
FROM occupied_seconds_per_day
ORDER BY 1,2;
-- Final output with capped seconds and percentage
SELECT
space_id,
event_date,
LEAST(raw_occupied_seconds, 86400) AS occupied_seconds,
ROUND(LEAST(raw_occupied_seconds, 86400) / 86400.0 * 100, 2) AS occupancy_percentage
FROM summed_intervals
ORDER BY space_id, event_date;

View File

@ -85,7 +85,7 @@ daily_aggregate AS (
GROUP BY device_id, event_date
)
INSERT INTO public."presence-sensor-daily-detection" (
INSERT INTO public."presence-sensor-daily-device-detection" (
device_uuid,
event_date,
count_motion_detected,

View File

@ -0,0 +1,18 @@
export function calculateAQI(pm2_5: number): number {
const breakpoints = [
{ pmLow: 0.0, pmHigh: 12.0, aqiLow: 0, aqiHigh: 50 },
{ pmLow: 12.1, pmHigh: 35.4, aqiLow: 51, aqiHigh: 100 },
{ pmLow: 35.5, pmHigh: 55.4, aqiLow: 101, aqiHigh: 150 },
{ pmLow: 55.5, pmHigh: 150.4, aqiLow: 151, aqiHigh: 200 },
{ pmLow: 150.5, pmHigh: 250.4, aqiLow: 201, aqiHigh: 300 },
{ pmLow: 250.5, pmHigh: 500.4, aqiLow: 301, aqiHigh: 500 },
];
const bp = breakpoints.find((b) => pm2_5 >= b.pmLow && pm2_5 <= b.pmHigh);
if (!bp) return pm2_5 > 500.4 ? 500 : 0; // Handle out-of-range values
return Math.round(
((bp.aqiHigh - bp.aqiLow) / (bp.pmHigh - bp.pmLow)) * (pm2_5 - bp.pmLow) +
bp.aqiLow,
);
}
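// Note: mirrors the PM2.5 breakpoint table used by the SQL calculate_aqi() helper; readings
// above 500.4 are clamped to an AQI of 500, and values that miss every band (below 0 or in the
// small gaps between bands) fall back to 0.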

View File

@ -0,0 +1,11 @@
import { DeviceEntity } from '../modules/device/entities';
export function addSpaceUuidToDevices(
devices: DeviceEntity[],
spaceUuid: string,
): DeviceEntity[] {
return devices.map((device) => {
(device as any).spaceUuid = spaceUuid;
return device;
});
}

View File

@ -1,7 +1,7 @@
import { HttpException, HttpStatus, Injectable } from '@nestjs/common';
import { ConfigService } from '@nestjs/config';
import * as nodemailer from 'nodemailer';
import axios from 'axios';
import * as nodemailer from 'nodemailer';
import {
SEND_EMAIL_API_URL_DEV,
SEND_EMAIL_API_URL_PROD,
@ -83,12 +83,17 @@ export class EmailService {
);
}
}
async sendEmailWithTemplate(
email: string,
name: string,
isEnable: boolean,
isDelete: boolean,
): Promise<void> {
async sendEmailWithTemplate({
email,
name,
isEnable,
isDelete,
}: {
email: string;
name: string;
isEnable: boolean;
isDelete: boolean;
}): Promise<void> {
const isProduction = process.env.NODE_ENV === 'production';
const API_TOKEN = this.configService.get<string>(
'email-config.MAILTRAP_API_TOKEN',

Some files were not shown because too many files have changed in this diff.