Compare commits


1 Commit

Author | SHA1 | Message | Date
| 478c4ca498 | device model fix timestamp and null pm25 | 2025-05-27 22:44:47 -04:00
206 changed files with 9943 additions and 11537 deletions

View File

@@ -21,7 +21,6 @@ module.exports = {
     '@typescript-eslint/explicit-function-return-type': 'off',
     '@typescript-eslint/explicit-module-boundary-types': 'off',
     '@typescript-eslint/no-explicit-any': 'off',
-    "@typescript-eslint/no-unused-vars": 'warn',
   },
   settings: {
     'import/resolver': {

View File

@@ -1,17 +0,0 @@
<!--
Thanks for contributing!
Provide a description of your changes below and a general summary in the title.
-->
## Jira Ticket
[SP-0000](https://syncrow.atlassian.net/browse/SP-0000)
## Description
<!--- Describe your changes in detail -->
## How to Test
<!--- Describe the created APIs / Logic -->

View File

@@ -1,7 +1,4 @@
-# Docs for the Azure Web Apps Deploy action: https://github.com/Azure/webapps-deploy
-# More GitHub Actions for Azure: https://github.com/Azure/actions
-name: Build and deploy container app to Azure Web App - syncrow(staging)
+name: Backend deployment to Azure App Service
 on:
   push:
@@ -9,43 +6,50 @@ on:
       - main
   workflow_dispatch:
+env:
+  AZURE_WEB_APP_NAME: 'syncrow'
+  AZURE_WEB_APP_SLOT_NAME: 'staging'
+  ACR_REGISTRY: 'syncrow.azurecr.io'
+  IMAGE_NAME: 'backend'
+  IMAGE_TAG: 'latest'
 jobs:
-  build:
+  build_and_deploy:
-    runs-on: 'ubuntu-latest'
-    steps:
-      - uses: actions/checkout@v2
-      - name: Set up Docker Buildx
-        uses: docker/setup-buildx-action@v2
-      - name: Log in to registry
-        uses: docker/login-action@v2
-        with:
-          registry: https://syncrow.azurecr.io/
-          username: ${{ secrets.AzureAppService_ContainerUsername_47395803300340b49931ea82f6d80be3 }}
-          password: ${{ secrets.AzureAppService_ContainerPassword_e7b0ff54f54d44cba04a970a22384848 }}
-      - name: Build and push container image to registry
-        uses: docker/build-push-action@v3
-        with:
-          push: true
-          tags: syncrow.azurecr.io/${{ secrets.AzureAppService_ContainerUsername_47395803300340b49931ea82f6d80be3 }}/syncrow/backend:${{ github.sha }}
-          file: ./Dockerfile
-  deploy:
     runs-on: ubuntu-latest
-    needs: build
-    environment:
-      name: 'staging'
-      url: ${{ steps.deploy-to-webapp.outputs.webapp-url }}
     steps:
-      - name: Deploy to Azure Web App
-        id: deploy-to-webapp
-        uses: azure/webapps-deploy@v2
+      - uses: actions/checkout@v4
+      - name: Set up Node.js
+        uses: actions/setup-node@v3
         with:
-          app-name: 'syncrow'
-          slot-name: 'staging'
-          publish-profile: ${{ secrets.AzureAppService_PublishProfile_44f7766441ec4796b74789e9761ef589 }}
-          images: 'syncrow.azurecr.io/${{ secrets.AzureAppService_ContainerUsername_47395803300340b49931ea82f6d80be3 }}/syncrow/backend:${{ github.sha }}'
+          node-version: '20'
+      - name: Install dependencies and build project
+        run: |
+          npm install
+          npm run build
+      - name: Log in to Azure
+        uses: azure/login@v1
+        with:
+          creds: ${{ secrets.AZURE_CREDENTIALS }}
+      - name: Log in to Azure Container Registry
+        run: az acr login --name ${{ env.ACR_REGISTRY }}
+      - name: List build output
+        run: ls -R dist/
+      - name: Build and push Docker image
+        run: |
+          docker build . -t ${{ env.ACR_REGISTRY }}/${{ env.IMAGE_NAME }}:${{ env.IMAGE_TAG }}
+          docker push ${{ env.ACR_REGISTRY }}/${{ env.IMAGE_NAME }}:${{ env.IMAGE_TAG }}
+      - name: Set Web App with Docker container
+        run: |
+          az webapp config container set \
+            --name ${{ env.AZURE_WEB_APP_NAME }} \
+            --resource-group backend \
+            --docker-custom-image-name ${{ env.ACR_REGISTRY }}/${{ env.IMAGE_NAME }}:${{ env.IMAGE_TAG }} \
+            --docker-registry-server-url https://${{ env.ACR_REGISTRY }}

View File

@@ -1,73 +0,0 @@
# Docs for the Azure Web Apps Deploy action: https://github.com/Azure/webapps-deploy
# More GitHub Actions for Azure: https://github.com/Azure/actions
name: Build and deploy Node.js app to Azure Web App - syncrow
on:
push:
branches:
- main
workflow_dispatch:
jobs:
build:
runs-on: ubuntu-latest
permissions:
contents: read #This is required for actions/checkout
steps:
- uses: actions/checkout@v4
- name: Set up Node.js version
uses: actions/setup-node@v3
with:
node-version: '20.x'
- name: npm install, build, and test
run: |
npm install
npm run build --if-present
npm run test --if-present
- name: Zip artifact for deployment
run: zip release.zip ./* -r
- name: Upload artifact for deployment job
uses: actions/upload-artifact@v4
with:
name: node-app
path: release.zip
deploy:
runs-on: ubuntu-latest
needs: build
environment:
name: 'stg'
url: ${{ steps.deploy-to-webapp.outputs.webapp-url }}
permissions:
id-token: write #This is required for requesting the JWT
contents: read #This is required for actions/checkout
steps:
- name: Download artifact from build job
uses: actions/download-artifact@v4
with:
name: node-app
- name: Unzip artifact for deployment
run: unzip release.zip
- name: Login to Azure
uses: azure/login@v2
with:
client-id: ${{ secrets.AZUREAPPSERVICE_CLIENTID_515C8E782CFF431AB20448C85CA0FE58 }}
tenant-id: ${{ secrets.AZUREAPPSERVICE_TENANTID_2AEFE5534424490387C08FAE41573CC2 }}
subscription-id: ${{ secrets.AZUREAPPSERVICE_SUBSCRIPTIONID_00623C33023749FEA5F6BC36884F9C8A }}
- name: 'Deploy to Azure Web App'
id: deploy-to-webapp
uses: azure/webapps-deploy@v3
with:
app-name: 'syncrow'
slot-name: 'stg'
package: .

View File

@@ -1,64 +0,0 @@
name: 🤖 AI PR Description Commenter (100% Safe with jq)
on:
pull_request:
types: [opened, edited]
jobs:
generate-description:
runs-on: ubuntu-latest
steps:
- name: Checkout Repo
uses: actions/checkout@v4
- name: Install GitHub CLI and jq
run: |
sudo apt-get update
sudo apt-get install gh jq -y
- name: Fetch PR Commits
id: fetch_commits
run: |
COMMITS=$(gh pr view ${{ github.event.pull_request.number }} --json commits --jq '.commits[].message' | sed 's/^/- /')
echo "commits<<EOF" >> $GITHUB_ENV
echo "$COMMITS" >> $GITHUB_ENV
echo "EOF" >> $GITHUB_ENV
env:
GH_TOKEN: ${{ secrets.GH_PERSONAL_TOKEN }}
- name: Generate PR Description with OpenAI (Safe JSON with jq)
run: |
REQUEST_BODY=$(jq -n \
--arg model "gpt-4o" \
--arg content "Given the following commit messages:\n\n${commits}\n\nGenerate a clear and professional pull request description." \
'{
model: $model,
messages: [{ role: "user", content: $content }]
}'
)
RESPONSE=$(curl -s https://api.openai.com/v1/chat/completions \
-H "Authorization: Bearer $OPENAI_API_KEY" \
-H "Content-Type: application/json" \
-d "$REQUEST_BODY")
DESCRIPTION=$(echo "$RESPONSE" | jq -r '.choices[0].message.content')
echo "---------- OpenAI Raw Response ----------"
echo "$RESPONSE"
echo "---------- Extracted Description ----------"
echo "$DESCRIPTION"
echo "description<<EOF" >> $GITHUB_ENV
echo "$DESCRIPTION" >> $GITHUB_ENV
echo "EOF" >> $GITHUB_ENV
env:
OPENAI_API_KEY: ${{ secrets.OPENAI_API_KEY }}
commits: ${{ env.commits }}
- name: Post AI Generated Description as Comment
run: |
gh pr comment ${{ github.event.pull_request.number }} --body "${{ env.description }}"
env:
GH_TOKEN: ${{ secrets.GH_PERSONAL_TOKEN }}

.gitignore
View File

@@ -4,7 +4,7 @@
 /build
 #github
-/.github/workflows
+/.github
 # Logs
 logs

View File

@@ -1,19 +1,18 @@
-import { PlatformType } from '@app/common/constants/platform-type.enum';
-import { RoleType } from '@app/common/constants/role.type.enum';
-import { UserEntity } from '@app/common/modules/user/entities';
 import {
   BadRequestException,
   Injectable,
   UnauthorizedException,
 } from '@nestjs/common';
-import { ConfigService } from '@nestjs/config';
 import { JwtService } from '@nestjs/jwt';
 import * as argon2 from 'argon2';
-import { OAuth2Client } from 'google-auth-library';
-import { UserSessionEntity } from '../../../../common/src/modules/session/entities';
-import { UserSessionRepository } from '../../../../common/src/modules/session/repositories/session.repository';
-import { UserRepository } from '../../../../common/src/modules/user/repositories';
 import { HelperHashService } from '../../helper/services';
+import { UserRepository } from '../../../../common/src/modules/user/repositories';
+import { UserSessionRepository } from '../../../../common/src/modules/session/repositories/session.repository';
+import { UserSessionEntity } from '../../../../common/src/modules/session/entities';
+import { ConfigService } from '@nestjs/config';
+import { OAuth2Client } from 'google-auth-library';
+import { PlatformType } from '@app/common/constants/platform-type.enum';
+import { RoleType } from '@app/common/constants/role.type.enum';
 @Injectable()
 export class AuthService {
@@ -33,7 +32,7 @@ export class AuthService {
     pass: string,
     regionUuid?: string,
     platform?: PlatformType,
-  ): Promise<Omit<UserEntity, 'password'>> {
+  ): Promise<any> {
     const user = await this.userRepository.findOne({
       where: {
         email,
@@ -41,17 +40,16 @@
       },
       relations: ['roleType', 'project'],
     });
-    if (!user) {
-      throw new BadRequestException('Invalid credentials');
-    }
     if (
       platform === PlatformType.WEB &&
-      [RoleType.SPACE_OWNER, RoleType.SPACE_MEMBER].includes(
-        user.roleType.type as RoleType,
-      )
+      (user.roleType.type === RoleType.SPACE_OWNER ||
+        user.roleType.type === RoleType.SPACE_MEMBER)
     ) {
       throw new UnauthorizedException('Access denied for web platform');
     }
+    if (!user) {
+      throw new BadRequestException('Invalid credentials');
+    }
     if (!user.isUserVerified) {
       throw new BadRequestException('User is not verified');
@@ -71,9 +69,8 @@
     }
     // eslint-disable-next-line @typescript-eslint/no-unused-vars
-    // const { password, ...result } = user;
-    delete user.password;
-    return user;
+    const { password, ...result } = user;
+    return result;
   }
   async createSession(data): Promise<UserSessionEntity> {
@@ -116,7 +113,6 @@
       hasAcceptedWebAgreement: user.hasAcceptedWebAgreement,
       hasAcceptedAppAgreement: user.hasAcceptedAppAgreement,
       project: user?.project,
-      bookingPoints: user?.bookingPoints,
     };
     if (payload.googleCode) {
       const profile = await this.getProfile(payload.googleCode);

View File

@@ -1,11 +1,12 @@
 import { Module } from '@nestjs/common';
-import { ConfigModule } from '@nestjs/config';
-import { ErrorMessageService } from 'src/error-message/error-message.service';
-import { AuthModule } from './auth/auth.module';
 import { CommonService } from './common.service';
-import config from './config';
 import { DatabaseModule } from './database/database.module';
 import { HelperModule } from './helper/helper.module';
+import { AuthModule } from './auth/auth.module';
+import { ConfigModule } from '@nestjs/config';
+import config from './config';
+import { EmailService } from './util/email.service';
+import { ErrorMessageService } from 'src/error-message/error-message.service';
 import { TuyaService } from './integrations/tuya/services/tuya.service';
 import { SceneDeviceRepository } from './modules/scene-device/repositories';
 import { SpaceRepository } from './modules/space';
@@ -14,7 +15,6 @@ import {
   SubspaceModelRepository,
 } from './modules/space-model';
 import { SubspaceRepository } from './modules/space/repositories/subspace.repository';
-import { EmailService } from './util/email/email.service';
 @Module({
   providers: [
     CommonService,

View File

@@ -10,8 +10,6 @@ export default registerAs(
     SMTP_USER: process.env.SMTP_USER,
     SMTP_SENDER: process.env.SMTP_SENDER,
     SMTP_PASSWORD: process.env.SMTP_PASSWORD,
-    BATCH_EMAIL_API_URL: process.env.BATCH_EMAIL_API_URL,
-    SEND_EMAIL_API_URL: process.env.SEND_EMAIL_API_URL,
     MAILTRAP_API_TOKEN: process.env.MAILTRAP_API_TOKEN,
     MAILTRAP_INVITATION_TEMPLATE_UUID:
       process.env.MAILTRAP_INVITATION_TEMPLATE_UUID,
@@ -23,9 +21,5 @@
       process.env.MAILTRAP_EDIT_USER_TEMPLATE_UUID,
     MAILTRAP_SEND_OTP_TEMPLATE_UUID:
       process.env.MAILTRAP_SEND_OTP_TEMPLATE_UUID,
-    MAILTRAP_SEND_BOOKING_AVAILABILITY_UPDATE_TEMPLATE_UUID:
-      process.env.MAILTRAP_SEND_BOOKING_AVAILABILITY_UPDATE_TEMPLATE_UUID,
-    MAILTRAP_SEND_BOOKING_TIMING_UPDATE_TEMPLATE_UUID:
-      process.env.MAILTRAP_SEND_BOOKING_TIMING_UPDATE_TEMPLATE_UUID,
   }),
 );

View File

@@ -69,47 +69,7 @@ export class ControllerRoute {
       'Retrieve the list of all regions registered in Syncrow.';
     };
   };
-  static BOOKABLE_SPACES = class {
-    public static readonly ROUTE = 'bookable-spaces';
-    static ACTIONS = class {
-      public static readonly ADD_BOOKABLE_SPACES_SUMMARY =
-        'Add new bookable spaces';
-      public static readonly ADD_BOOKABLE_SPACES_DESCRIPTION =
-        'This endpoint allows you to add new bookable spaces by providing the required details.';
-      public static readonly GET_ALL_BOOKABLE_SPACES_SUMMARY =
-        'Get all bookable spaces';
-      public static readonly GET_ALL_BOOKABLE_SPACES_DESCRIPTION =
-        'This endpoint retrieves all bookable spaces.';
-      public static readonly UPDATE_BOOKABLE_SPACES_SUMMARY =
-        'Update existing bookable spaces';
-      public static readonly UPDATE_BOOKABLE_SPACES_DESCRIPTION =
-        'This endpoint allows you to update existing bookable spaces by providing the required details.';
-    };
-  };
-  static BOOKING = class {
-    public static readonly ROUTE = 'bookings';
-    static ACTIONS = class {
-      public static readonly ADD_BOOKING_SUMMARY = 'Add new booking';
-      public static readonly ADD_BOOKING_DESCRIPTION =
-        'This endpoint allows you to add new booking by providing the required details.';
-      public static readonly GET_ALL_BOOKINGS_SUMMARY = 'Get all bookings';
-      public static readonly GET_ALL_BOOKINGS_DESCRIPTION =
-        'This endpoint retrieves all bookings.';
-      public static readonly GET_MY_BOOKINGS_SUMMARY = 'Get my bookings';
-      public static readonly GET_MY_BOOKINGS_DESCRIPTION =
-        'This endpoint retrieves all bookings for the authenticated user.';
-    };
-  };
   static COMMUNITY = class {
     public static readonly ROUTE = '/projects/:projectUuid/communities';
     static ACTIONS = class {
@@ -239,11 +199,6 @@
       public static readonly UPDATE_SPACE_DESCRIPTION =
         'Updates a space by its UUID and community ID. You can update the name, parent space, and other properties. If a parent space is provided and not already a parent, its `isParent` flag will be set to true.';
-      public static readonly UPDATE_CHILDREN_SPACES_ORDER_OF_A_SPACE_SUMMARY =
-        'Update the order of child spaces under a specific parent space';
-      public static readonly UPDATE_CHILDREN_SPACES_ORDER_OF_A_SPACE_DESCRIPTION =
-        'Updates the order of child spaces under a specific parent space. You can provide a new order for the child spaces.';
       public static readonly GET_HEIRARCHY_SUMMARY = 'Get space hierarchy';
       public static readonly GET_HEIRARCHY_DESCRIPTION =
         'This endpoint retrieves the hierarchical structure of spaces under a given space ID. It returns all the child spaces nested within the specified space, organized by their parent-child relationships. ';
@@ -442,11 +397,6 @@
       public static readonly DELETE_USER_SUMMARY = 'Delete user by UUID';
       public static readonly DELETE_USER_DESCRIPTION =
         'This endpoint deletes a user identified by their UUID. Accessible only by users with the Super Admin role.';
-      public static readonly DELETE_USER_PROFILE_SUMMARY =
-        'Delete user profile by UUID';
-      public static readonly DELETE_USER_PROFILE_DESCRIPTION =
-        'This endpoint deletes a user profile identified by their UUID. Accessible only by users with the Super Admin role.';
       public static readonly UPDATE_USER_WEB_AGREEMENT_SUMMARY =
         'Update user web agreement by user UUID';
       public static readonly UPDATE_USER_WEB_AGREEMENT_DESCRIPTION =
@@ -515,16 +465,7 @@
         'This endpoint retrieves the terms and conditions for the application.';
     };
   };
-  static WEATHER = class {
-    public static readonly ROUTE = 'weather';
-    static ACTIONS = class {
-      public static readonly FETCH_WEATHER_DETAILS_SUMMARY =
-        'Fetch Weather Details';
-      public static readonly FETCH_WEATHER_DETAILS_DESCRIPTION =
-        'This endpoint retrieves the current weather details for a specified location like temperature, humidity, etc.';
-    };
-  };
   static PRIVACY_POLICY = class {
     public static readonly ROUTE = 'policy';
@@ -551,6 +492,7 @@
   };
   static PowerClamp = class {
     public static readonly ROUTE = 'power-clamp';
     static ACTIONS = class {
       public static readonly GET_ENERGY_SUMMARY =
         'Get power clamp historical data';
@@ -573,20 +515,6 @@
         'This endpoint retrieves the occupancy heat map data based on the provided parameters.';
     };
   };
-  static AQI = class {
-    public static readonly ROUTE = 'aqi';
-    static ACTIONS = class {
-      public static readonly GET_AQI_RANGE_DATA_SUMMARY = 'Get AQI range data';
-      public static readonly GET_AQI_RANGE_DATA_DESCRIPTION =
-        'This endpoint retrieves the AQI (Air Quality Index) range data based on the provided parameters.';
-      public static readonly GET_AQI_DISTRIBUTION_DATA_SUMMARY =
-        'Get AQI distribution data';
-      public static readonly GET_AQI_DISTRIBUTION_DATA_DESCRIPTION =
-        'This endpoint retrieves the AQI (Air Quality Index) distribution data based on the provided parameters.';
-    };
-  };
   static DEVICE = class {
     public static readonly ROUTE = 'devices';
@@ -677,11 +605,6 @@
         'Delete scenes by device uuid and switch name';
       public static readonly DELETE_SCENES_BY_SWITCH_NAME_DESCRIPTION =
         'This endpoint deletes all scenes associated with a specific switch device.';
-      public static readonly POPULATE_TUYA_CONST_UUID_SUMMARY =
-        'Populate Tuya const UUID';
-      public static readonly POPULATE_TUYA_CONST_UUID_DESCRIPTION =
-        'This endpoint populates the Tuya const UUID for all devices.';
     };
   };
   static DEVICE_COMMISSION = class {

View File

@@ -0,0 +1,3 @@
export const SEND_EMAIL_API_URL_PROD = 'https://send.api.mailtrap.io/api/send/';
export const SEND_EMAIL_API_URL_DEV =
'https://sandbox.api.mailtrap.io/api/send/2634012';

View File

@@ -1,8 +0,0 @@
export enum PollutantType {
AQI = 'aqi',
PM25 = 'pm25',
PM10 = 'pm10',
VOC = 'voc',
CO2 = 'co2',
CH2O = 'ch2o',
}

View File

@@ -15,10 +15,8 @@ export enum ProductType {
   WL = 'WL',
   GD = 'GD',
   CUR = 'CUR',
-  CUR_2 = 'CUR_2',
   PC = 'PC',
   FOUR_S = '4S',
   SIX_S = '6S',
   SOS = 'SOS',
-  AQI = 'AQI',
 }

View File

@@ -1,33 +1,51 @@
 import { Module } from '@nestjs/common';
 import { ConfigModule, ConfigService } from '@nestjs/config';
 import { TypeOrmModule } from '@nestjs/typeorm';
-import { DeviceEntity } from '../modules/device/entities';
-import { PermissionTypeEntity } from '../modules/permission/entities';
-import { ProductEntity } from '../modules/product/entities';
+import { SnakeNamingStrategy } from './strategies';
+import { UserEntity } from '../modules/user/entities/user.entity';
 import { UserSessionEntity } from '../modules/session/entities/session.entity';
 import { UserOtpEntity } from '../modules/user/entities';
-import { UserEntity } from '../modules/user/entities/user.entity';
-import { SnakeNamingStrategy } from './strategies';
-import { TypeOrmWinstonLogger } from '@app/common/logger/services/typeorm.logger';
-import { createLogger } from 'winston';
-import { winstonLoggerOptions } from '../logger/services/winston.logger';
-import { AqiSpaceDailyPollutantStatsEntity } from '../modules/aqi/entities';
-import { AutomationEntity } from '../modules/automation/entities';
-import { BookableSpaceEntity } from '../modules/booking/entities/bookable-space.entity';
-import { BookingEntity } from '../modules/booking/entities/booking.entity';
-import { ClientEntity } from '../modules/client/entities';
+import { ProductEntity } from '../modules/product/entities';
+import { DeviceEntity } from '../modules/device/entities';
+import { PermissionTypeEntity } from '../modules/permission/entities';
+import { UserSpaceEntity } from '../modules/user/entities';
+import { DeviceUserPermissionEntity } from '../modules/device/entities';
+import { RoleTypeEntity } from '../modules/role-type/entities';
+import { UserNotificationEntity } from '../modules/user/entities';
+import { DeviceNotificationEntity } from '../modules/device/entities';
+import { RegionEntity } from '../modules/region/entities';
+import { TimeZoneEntity } from '../modules/timezone/entities';
+import { VisitorPasswordEntity } from '../modules/visitor-password/entities';
 import { CommunityEntity } from '../modules/community/entities';
 import { DeviceStatusLogEntity } from '../modules/device-status-log/entities';
+import { SceneEntity, SceneIconEntity } from '../modules/scene/entities';
+import { SceneDeviceEntity } from '../modules/scene-device/entities';
+import { ProjectEntity } from '../modules/project/entities';
 import {
-  DeviceNotificationEntity,
-  DeviceUserPermissionEntity,
-} from '../modules/device/entities';
+  SpaceModelEntity,
+  SubspaceModelEntity,
+  TagModel,
+  SpaceModelProductAllocationEntity,
+  SubspaceModelProductAllocationEntity,
+} from '../modules/space-model/entities';
 import {
   InviteUserEntity,
   InviteUserSpaceEntity,
 } from '../modules/Invite-user/entities';
-import { SpaceDailyOccupancyDurationEntity } from '../modules/occupancy/entities';
+import { InviteSpaceEntity } from '../modules/space/entities/invite-space.entity';
+import { AutomationEntity } from '../modules/automation/entities';
+import { SpaceProductAllocationEntity } from '../modules/space/entities/space-product-allocation.entity';
+import { NewTagEntity } from '../modules/tag/entities/tag.entity';
+import { SpaceEntity } from '../modules/space/entities/space.entity';
+import { SpaceLinkEntity } from '../modules/space/entities/space-link.entity';
+import { SubspaceProductAllocationEntity } from '../modules/space/entities/subspace/subspace-product-allocation.entity';
+import { SubspaceEntity } from '../modules/space/entities/subspace/subspace.entity';
+import { TagEntity } from '../modules/space/entities/tag.entity';
+import { ClientEntity } from '../modules/client/entities';
+import { TypeOrmWinstonLogger } from '@app/common/logger/services/typeorm.logger';
+import { createLogger } from 'winston';
+import { winstonLoggerOptions } from '../logger/services/winston.logger';
 import {
   PowerClampDailyEntity,
   PowerClampHourlyEntity,
@@ -37,29 +55,6 @@
   PresenceSensorDailyDeviceEntity,
   PresenceSensorDailySpaceEntity,
 } from '../modules/presence-sensor/entities';
-import { ProjectEntity } from '../modules/project/entities';
-import { RegionEntity } from '../modules/region/entities';
-import { RoleTypeEntity } from '../modules/role-type/entities';
-import { SceneDeviceEntity } from '../modules/scene-device/entities';
-import { SceneEntity, SceneIconEntity } from '../modules/scene/entities';
-import {
-  SpaceModelEntity,
-  SpaceModelProductAllocationEntity,
-  SubspaceModelEntity,
-  SubspaceModelProductAllocationEntity,
-} from '../modules/space-model/entities';
-import { InviteSpaceEntity } from '../modules/space/entities/invite-space.entity';
-import { SpaceProductAllocationEntity } from '../modules/space/entities/space-product-allocation.entity';
-import { SpaceEntity } from '../modules/space/entities/space.entity';
-import { SubspaceProductAllocationEntity } from '../modules/space/entities/subspace/subspace-product-allocation.entity';
-import { SubspaceEntity } from '../modules/space/entities/subspace/subspace.entity';
-import { NewTagEntity } from '../modules/tag/entities/tag.entity';
-import { TimeZoneEntity } from '../modules/timezone/entities';
-import {
-  UserNotificationEntity,
-  UserSpaceEntity,
-} from '../modules/user/entities';
-import { VisitorPasswordEntity } from '../modules/visitor-password/entities';
 @Module({
   imports: [
     TypeOrmModule.forRootAsync({
@@ -88,7 +83,9 @@ import { VisitorPasswordEntity } from '../modules/visitor-password/entities';
         PermissionTypeEntity,
         CommunityEntity,
         SpaceEntity,
+        SpaceLinkEntity,
         SubspaceEntity,
+        TagEntity,
         UserSpaceEntity,
         DeviceUserPermissionEntity,
         RoleTypeEntity,
@@ -103,6 +100,7 @@ import { VisitorPasswordEntity } from '../modules/visitor-password/entities';
         SceneDeviceEntity,
         SpaceModelEntity,
         SubspaceModelEntity,
+        TagModel,
         InviteUserEntity,
         InviteUserSpaceEntity,
         InviteSpaceEntity,
@@ -117,10 +115,6 @@ import { VisitorPasswordEntity } from '../modules/visitor-password/entities';
         PowerClampMonthlyEntity,
         PresenceSensorDailyDeviceEntity,
         PresenceSensorDailySpaceEntity,
-        AqiSpaceDailyPollutantStatsEntity,
-        SpaceDailyOccupancyDurationEntity,
-        BookableSpaceEntity,
-        BookingEntity,
       ],
       namingStrategy: new SnakeNamingStrategy(),
      synchronize: Boolean(JSON.parse(configService.get('DB_SYNC'))),
@@ -129,8 +123,8 @@ import { VisitorPasswordEntity } from '../modules/visitor-password/entities';
       logger: typeOrmLogger,
       extra: {
         charset: 'utf8mb4',
-        max: 100, // set pool max size
-        idleTimeoutMillis: 3000, // close idle clients after 5 second
+        max: 20, // set pool max size
+        idleTimeoutMillis: 5000, // close idle clients after 5 second
         connectionTimeoutMillis: 12_000, // return an error after 11 second if connection could not be established
         maxUses: 7500, // close (and replace) a connection after it has been used 7500 times (see below for discussion)
       },

View File

@@ -1,9 +1,10 @@
-import { ApiProperty } from '@nestjs/swagger';
-import { Transform } from 'class-transformer';
-import { IsBoolean, IsOptional } from 'class-validator';
-import { BooleanValues } from '../constants/boolean-values.enum';
+import { IsBoolean, IsDate, IsOptional } from 'class-validator';
 import { IsPageRequestParam } from '../validators/is-page-request-param.validator';
+import { ApiProperty } from '@nestjs/swagger';
 import { IsSizeRequestParam } from '../validators/is-size-request-param.validator';
+import { Transform } from 'class-transformer';
+import { parseToDate } from '../util/parseToDate';
+import { BooleanValues } from '../constants/boolean-values.enum';
 export class PaginationRequestGetListDto {
   @ApiProperty({
@@ -18,7 +19,6 @@ export class PaginationRequestGetListDto {
     return value.obj.includeSpaces === BooleanValues.TRUE;
   })
   public includeSpaces?: boolean = false;
   @IsOptional()
   @IsPageRequestParam({
     message: 'Page must be bigger than 0',
@@ -40,4 +40,40 @@
     description: 'Size request',
   })
   size?: number;
+  @IsOptional()
+  @ApiProperty({
+    name: 'name',
+    required: false,
+    description: 'Name to be filtered',
+  })
+  name?: string;
+  @ApiProperty({
+    name: 'from',
+    required: false,
+    type: Number,
+    description: `Start time in UNIX timestamp format to filter`,
+    example: 1674172800000,
+  })
+  @IsOptional()
+  @Transform(({ value }) => parseToDate(value))
+  @IsDate({
+    message: `From must be in UNIX timestamp format in order to parse to Date instance`,
+  })
+  from?: Date;
+  @ApiProperty({
+    name: 'to',
+    required: false,
+    type: Number,
+    description: `End time in UNIX timestamp format to filter`,
+    example: 1674259200000,
+  })
+  @IsOptional()
+  @Transform(({ value }) => parseToDate(value))
+  @IsDate({
+    message: `To must be in UNIX timestamp format in order to parse to Date instance`,
+  })
+  to?: Date;
 }

View File

@@ -3,12 +3,26 @@ import { DeviceStatusFirebaseController } from './controllers/devices-status.con
 import { DeviceStatusFirebaseService } from './services/devices-status.service';
 import { DeviceRepository } from '@app/common/modules/device/repositories';
 import { DeviceStatusLogRepository } from '@app/common/modules/device-status-log/repositories/device-status.repository';
+import { PowerClampService } from '@app/common/helper/services/power.clamp.service';
+import {
+  PowerClampHourlyRepository,
+  PowerClampDailyRepository,
+  PowerClampMonthlyRepository,
+} from '@app/common/modules/power-clamp/repositories';
+import { SqlLoaderService } from '@app/common/helper/services/sql-loader.service';
+import { OccupancyService } from '@app/common/helper/services/occupancy.service';
 @Module({
   providers: [
     DeviceStatusFirebaseService,
     DeviceRepository,
     DeviceStatusLogRepository,
+    PowerClampService,
+    PowerClampHourlyRepository,
+    PowerClampDailyRepository,
+    PowerClampMonthlyRepository,
+    SqlLoaderService,
+    OccupancyService,
   ],
   controllers: [DeviceStatusFirebaseController],
   exports: [DeviceStatusFirebaseService, DeviceStatusLogRepository],

View File

@@ -13,7 +13,6 @@ class StatusDto {
   @IsNotEmpty()
   value: any;
-  t?: string | number | Date;
 }
 export class AddDeviceStatusDto {

View File

@@ -18,15 +18,20 @@ import {
   runTransaction,
 } from 'firebase/database';
 import { DeviceStatusLogRepository } from '@app/common/modules/device-status-log/repositories';
+import { ProductType } from '@app/common/constants/product-type.enum';
+import { PowerClampService } from '@app/common/helper/services/power.clamp.service';
+import { PowerClampEnergyEnum } from '@app/common/constants/power.clamp.enargy.enum';
+import { PresenceSensorEnum } from '@app/common/constants/presence.sensor.enum';
+import { OccupancyService } from '@app/common/helper/services/occupancy.service';
 @Injectable()
 export class DeviceStatusFirebaseService {
   private tuya: TuyaContext;
   private firebaseDb: Database;
-  private readonly isDevEnv: boolean;
   constructor(
     private readonly configService: ConfigService,
     private readonly deviceRepository: DeviceRepository,
+    private readonly powerClampService: PowerClampService,
+    private readonly occupancyService: OccupancyService,
     private deviceStatusLogRepository: DeviceStatusLogRepository,
   ) {
     const accessKey = this.configService.get<string>('auth-config.ACCESS_KEY');
@@ -40,8 +45,6 @@ export class DeviceStatusFirebaseService {
     // Initialize firebaseDb using firebaseDataBase function
     this.firebaseDb = firebaseDataBase(this.configService);
-    this.isDevEnv =
-      this.configService.get<string>('NODE_ENV') === 'development';
   }
   async addDeviceStatusByDeviceUuid(
     deviceTuyaUuid: string,
@@ -56,7 +59,7 @@
       const deviceStatusSaved = await this.createDeviceStatusFirebase({
         deviceUuid: device.uuid,
         deviceTuyaUuid: deviceTuyaUuid,
-        status: deviceStatus?.status,
+        status: deviceStatus.status,
         productUuid: deviceStatus.productUuid,
         productType: deviceStatus.productType,
       });
@@ -71,94 +74,25 @@
       );
     }
   }
-  async addBatchDeviceStatusToOurDb(
-    batch: {
-      deviceTuyaUuid: string;
-      status: any;
-      log: any;
-      device: any;
-    }[],
-  ): Promise<void> {
-    const allLogs = [];
-    console.log(`🔁 Preparing logs from batch of ${batch.length} items...`);
-    for (const item of batch) {
-      const device = item.device;
-      if (!device?.uuid) {
-        console.log(`⛔ Skipped unknown device: ${item.deviceTuyaUuid}`);
-        continue;
-      }
-      const logs = item.log.properties.map((property) =>
-        this.deviceStatusLogRepository.create({
-          deviceId: device.uuid,
-          deviceTuyaId: item.deviceTuyaUuid,
-          productId: item.log.productId,
-          log: item.log,
-          code: property.code,
-          value: property.value,
-          eventId: item.log.dataId,
-          eventTime: new Date(property.time).toISOString(),
-        }),
-      );
-      allLogs.push(...logs);
-    }
-    console.log(`📝 Total logs to insert: ${allLogs.length}`);
-    const insertLogsPromise = (async () => {
-      const chunkSize = 300;
-      let insertedCount = 0;
-      for (let i = 0; i < allLogs.length; i += chunkSize) {
-        const chunk = allLogs.slice(i, i + chunkSize);
-        try {
-          const result = await this.deviceStatusLogRepository
-            .createQueryBuilder()
-            .insert()
-            .into('device-status-log') // or use DeviceStatusLogEntity
-            .values(chunk)
-            .orIgnore() // skip duplicates
-            .execute();
-          insertedCount += result.identifiers.length;
-          console.log(
-            `✅ Inserted ${result.identifiers.length} / ${chunk.length} logs (chunk)`,
-          );
-        } catch (error) {
-          console.error('❌ Insert error (skipped chunk):', error.message);
-        }
-      }
-      console.log(
-        `✅ Total logs inserted: ${insertedCount} / ${allLogs.length}`,
-      );
-    })();
-    await insertLogsPromise;
-  }
   async addDeviceStatusToFirebase(
-    addDeviceStatusDto: AddDeviceStatusDto & { device?: any },
+    addDeviceStatusDto: AddDeviceStatusDto,
   ): Promise<AddDeviceStatusDto | null> {
     try {
-      let device = addDeviceStatusDto.device;
-      if (!device) {
-        device = await this.getDeviceByDeviceTuyaUuid(
-          addDeviceStatusDto.deviceTuyaUuid,
-        );
-      }
+      const device = await this.getDeviceByDeviceTuyaUuid(
+        addDeviceStatusDto.deviceTuyaUuid,
+      );
       if (device?.uuid) {
         return await this.createDeviceStatusFirebase({
           deviceUuid: device.uuid,
           ...addDeviceStatusDto,
-          productType: device.productDevice?.prodType,
+          productType: device.productDevice.prodType,
         });
       }
       // Return null if device not found or no UUID
       return null;
     } catch (error) {
+      // Handle the error silently, perhaps log it internally or ignore it
      return null;
     }
   }
@@ -172,15 +106,6 @@
       relations: ['productDevice'],
     });
   }
-  async getAllDevices() {
-    return await this.deviceRepository.find({
-      where: {
-        isActive: true,
-      },
-      relations: ['productDevice'],
-    });
-  }
   async getDevicesInstructionStatus(deviceUuid: string) {
     try {
       const deviceDetails = await this.getDeviceByDeviceUuid(deviceUuid);
@@ -195,7 +120,7 @@
       return {
         productUuid: deviceDetails.productDevice.uuid,
         productType: deviceDetails.productDevice.prodType,
-        status: deviceStatus.result[0]?.status,
+        status: deviceStatus.result[0].status,
       };
     } catch (error) {
       throw new HttpException(
@@ -260,18 +185,18 @@
         if (!existingData.productType) {
           existingData.productType = addDeviceStatusDto.productType;
         }
-        if (!existingData?.status) {
+        if (!existingData.status) {
           existingData.status = [];
         }
         // Create a map to track existing status codes
         const statusMap = new Map(
-          existingData?.status.map((item) => [item.code, item.value]),
+          existingData.status.map((item) => [item.code, item.value]),
         );
         // Update or add status codes
-        for (const statusItem of addDeviceStatusDto?.status) {
+        for (const statusItem of addDeviceStatusDto.status) {
           statusMap.set(statusItem.code, statusItem.value);
         }
@@ -284,6 +209,60 @@
       return existingData;
     });
+    // Save logs to your repository
+    const newLogs = addDeviceStatusDto.log.properties.map((property) => {
+      return this.deviceStatusLogRepository.create({
+        deviceId: addDeviceStatusDto.deviceUuid,
+        deviceTuyaId: addDeviceStatusDto.deviceTuyaUuid,
+        productId: addDeviceStatusDto.log.productId,
+        log: addDeviceStatusDto.log,
+        code: property.code,
+        value: property.value,
+        eventId: addDeviceStatusDto.log.dataId,
+        eventTime: new Date(property.time).toISOString(),
+      });
+    });
+    await this.deviceStatusLogRepository.save(newLogs);
+    if (addDeviceStatusDto.productType === ProductType.PC) {
+      const energyCodes = new Set([
+        PowerClampEnergyEnum.ENERGY_CONSUMED,
+        PowerClampEnergyEnum.ENERGY_CONSUMED_A,
+        PowerClampEnergyEnum.ENERGY_CONSUMED_B,
+        PowerClampEnergyEnum.ENERGY_CONSUMED_C,
+      ]);
+      const energyStatus = addDeviceStatusDto?.log?.properties?.find((status) =>
+        energyCodes.has(status.code),
+      );
+      if (energyStatus) {
+        await this.powerClampService.updateEnergyConsumedHistoricalData(
+          addDeviceStatusDto.deviceUuid,
+        );
+      }
+    }
+    if (
+      addDeviceStatusDto.productType === ProductType.CPS ||
+      addDeviceStatusDto.productType === ProductType.WPS
+    ) {
+      const occupancyCodes = new Set([PresenceSensorEnum.PRESENCE_STATE]);
+      const occupancyStatus = addDeviceStatusDto?.log?.properties?.find(
+        (status) => occupancyCodes.has(status.code),
+      );
+      if (occupancyStatus) {
+        await this.occupancyService.updateOccupancySensorHistoricalData(
+          addDeviceStatusDto.deviceUuid,
+        );
+        await this.occupancyService.updateOccupancySensorHistoricalDurationData(
+          addDeviceStatusDto.deviceUuid,
+        );
+      }
+    }
     // Return the updated data
     const snapshot: DataSnapshot = await get(dataRef);
     return snapshot.val();

View File

@@ -8,10 +8,7 @@ import { TuyaWebSocketService } from './services/tuya.web.socket.service';
 import { OneSignalService } from './services/onesignal.service';
 import { DeviceMessagesService } from './services/device.messages.service';
 import { DeviceRepositoryModule } from '../modules/device/device.repository.module';
-import {
-  DeviceNotificationRepository,
-  DeviceRepository,
-} from '../modules/device/repositories';
+import { DeviceNotificationRepository } from '../modules/device/repositories';
 import { DeviceStatusFirebaseModule } from '../firebase/devices-status/devices-status.module';
 import { CommunityPermissionService } from './services/community.permission.service';
 import { CommunityRepository } from '../modules/community/repositories';
@@ -30,7 +27,6 @@ import { SosHandlerService } from './services/sos.handler.service';
     DeviceNotificationRepository,
     CommunityRepository,
     SosHandlerService,
-    DeviceRepository,
   ],
   exports: [
     HelperHashService,

View File

@@ -1,66 +0,0 @@
import { Injectable } from '@nestjs/common';
import { DataSource } from 'typeorm';
import { SQL_PROCEDURES_PATH } from '@app/common/constants/sql-query-path';
import { SqlLoaderService } from './sql-loader.service';
@Injectable()
export class AqiDataService {
constructor(
private readonly sqlLoader: SqlLoaderService,
private readonly dataSource: DataSource,
) {}
async updateAQISensorHistoricalData(): Promise<void> {
try {
const { dateStr } = this.getFormattedDates();
// Execute all procedures in parallel
await Promise.all([
this.executeProcedureWithRetry(
'proceduce_update_daily_space_aqi',
[dateStr],
'fact_daily_space_aqi',
),
]);
} catch (err) {
console.error('Failed to update AQI sensor historical data:', err);
throw err;
}
}
private getFormattedDates(): { dateStr: string } {
const now = new Date();
return {
dateStr: now.toLocaleDateString('en-CA'), // YYYY-MM-DD
};
}
private async executeProcedureWithRetry(
procedureFileName: string,
params: (string | number | null)[],
folderName: string,
retries = 3,
): Promise<void> {
try {
const query = this.loadQuery(folderName, procedureFileName);
await this.dataSource.query(query, params);
console.log(`Procedure ${procedureFileName} executed successfully.`);
} catch (err) {
if (retries > 0) {
const delayMs = 1000 * (4 - retries); // Exponential backoff
console.warn(`Retrying ${procedureFileName} (${retries} retries left)`);
await new Promise((resolve) => setTimeout(resolve, delayMs));
return this.executeProcedureWithRetry(
procedureFileName,
params,
folderName,
retries - 1,
);
}
console.error(`Failed to execute ${procedureFileName}:`, err);
throw err;
}
}
private loadQuery(folderName: string, fileName: string): string {
return this.sqlLoader.loadQuery(folderName, fileName, SQL_PROCEDURES_PATH);
}
}

View File

@@ -1,69 +1,66 @@
+import { DeviceRepository } from '@app/common/modules/device/repositories';
 import { Injectable } from '@nestjs/common';
+import { SqlLoaderService } from './sql-loader.service';
 import { DataSource } from 'typeorm';
 import { SQL_PROCEDURES_PATH } from '@app/common/constants/sql-query-path';
-import { SqlLoaderService } from './sql-loader.service';
 @Injectable()
 export class OccupancyService {
   constructor(
     private readonly sqlLoader: SqlLoaderService,
     private readonly dataSource: DataSource,
+    private readonly deviceRepository: DeviceRepository,
   ) {}
-  async updateOccupancyDataProcedures(): Promise<void> {
-    try {
-      const { dateStr } = this.getFormattedDates();
-      // Execute all procedures in parallel
-      await Promise.all([
-        this.executeProcedureWithRetry(
-          'procedure_update_fact_space_occupancy',
-          [dateStr],
-          'fact_space_occupancy_count',
-        ),
-        this.executeProcedureWithRetry(
-          'procedure_update_daily_space_occupancy_duration',
-          [dateStr],
-          'fact_daily_space_occupancy_duration',
-        ),
-      ]);
-    } catch (err) {
-      console.error('Failed to update occupancy data:', err);
-      throw err;
-    }
-  }
-  private getFormattedDates(): { dateStr: string } {
-    const now = new Date();
-    return {
-      dateStr: now.toLocaleDateString('en-CA'), // YYYY-MM-DD
-    };
-  }
-  private async executeProcedureWithRetry(
-    procedureFileName: string,
-    params: (string | number | null)[],
-    folderName: string,
-    retries = 3,
-  ): Promise<void> {
-    try {
-      const query = this.loadQuery(folderName, procedureFileName);
-      await this.dataSource.query(query, params);
-      console.log(`Procedure ${procedureFileName} executed successfully.`);
-    } catch (err) {
-      if (retries > 0) {
-        const delayMs = 1000 * (4 - retries); // Exponential backoff
-        console.warn(`Retrying ${procedureFileName} (${retries} retries left)`);
-        await new Promise((resolve) => setTimeout(resolve, delayMs));
-        return this.executeProcedureWithRetry(
-          procedureFileName,
-          params,
-          folderName,
-          retries - 1,
-        );
-      }
-      console.error(`Failed to execute ${procedureFileName}:`, err);
-      throw err;
-    }
-  }
+  async updateOccupancySensorHistoricalDurationData(
+    deviceUuid: string,
+  ): Promise<void> {
+    try {
+      const now = new Date();
+      const dateStr = now.toLocaleDateString('en-CA'); // YYYY-MM-DD
+      const device = await this.deviceRepository.findOne({
+        where: { uuid: deviceUuid },
+        relations: ['spaceDevice'],
+      });
+      await this.executeProcedure(
+        'fact_daily_space_occupancy_duration',
+        'procedure_update_daily_space_occupancy_duration',
+        [dateStr, device.spaceDevice?.uuid],
+      );
+    } catch (err) {
+      console.error('Failed to insert or update occupancy duration data:', err);
+      throw err;
+    }
+  }
+  async updateOccupancySensorHistoricalData(deviceUuid: string): Promise<void> {
+    try {
+      const now = new Date();
+      const dateStr = now.toLocaleDateString('en-CA'); // YYYY-MM-DD
+      const device = await this.deviceRepository.findOne({
+        where: { uuid: deviceUuid },
+        relations: ['spaceDevice'],
+      });
+      await this.executeProcedure(
+        'fact_space_occupancy_count',
+        'procedure_update_fact_space_occupancy',
+        [dateStr, device.spaceDevice?.uuid],
+      );
+    } catch (err) {
+      console.error('Failed to insert or update occupancy data:', err);
+      throw err;
+    }
+  }
+  private async executeProcedure(
+    procedureFolderName: string,
+    procedureFileName: string,
+    params: (string | number | null)[],
+  ): Promise<void> {
+    const query = this.loadQuery(procedureFolderName, procedureFileName);
+    await this.dataSource.query(query, params);
+    console.log(`Procedure ${procedureFileName} executed successfully.`);
+  }
   private loadQuery(folderName: string, fileName: string): string {
     return this.sqlLoader.loadQuery(folderName, fileName, SQL_PROCEDURES_PATH);

View File

@@ -1,7 +1,7 @@
 import { Injectable } from '@nestjs/common';
+import { SqlLoaderService } from './sql-loader.service';
 import { DataSource } from 'typeorm';
 import { SQL_PROCEDURES_PATH } from '@app/common/constants/sql-query-path';
-import { SqlLoaderService } from './sql-loader.service';
 @Injectable()
 export class PowerClampService {
@@ -10,74 +10,50 @@ export class PowerClampService {
     private readonly dataSource: DataSource,
   ) {}
-  async updateEnergyConsumedHistoricalData(): Promise<void> {
-    try {
-      const { dateStr, monthYear } = this.getFormattedDates();
-      // Execute all procedures in parallel
-      await Promise.all([
-        this.executeProcedureWithRetry(
-          'fact_hourly_device_energy_consumed_procedure',
-          [dateStr],
-          'fact_device_energy_consumed',
-        ),
-        this.executeProcedureWithRetry(
-          'fact_daily_device_energy_consumed_procedure',
-          [dateStr],
-          'fact_device_energy_consumed',
-        ),
-        this.executeProcedureWithRetry(
-          'fact_monthly_device_energy_consumed_procedure',
-          [monthYear],
-          'fact_device_energy_consumed',
-        ),
-      ]);
-    } catch (err) {
-      console.error('Failed to update energy consumption data:', err);
-      throw err;
-    }
-  }
-  private getFormattedDates(): { dateStr: string; monthYear: string } {
-    const now = new Date();
-    return {
-      dateStr: now.toLocaleDateString('en-CA'), // YYYY-MM-DD
-      monthYear: now
-        .toLocaleDateString('en-US', {
-          month: '2-digit',
-          year: 'numeric',
-        })
-        .replace('/', '-'), // MM-YYYY
-    };
-  }
-  private async executeProcedureWithRetry(
-    procedureFileName: string,
-    params: (string | number | null)[],
-    folderName: string,
-    retries = 3,
-  ): Promise<void> {
-    try {
-      const query = this.loadQuery(folderName, procedureFileName);
-      await this.dataSource.query(query, params);
-      console.log(`Procedure ${procedureFileName} executed successfully.`);
-    } catch (err) {
-      if (retries > 0) {
-        const delayMs = 1000 * (4 - retries); // Exponential backoff
-        console.warn(`Retrying ${procedureFileName} (${retries} retries left)`);
-        await new Promise((resolve) => setTimeout(resolve, delayMs));
-        return this.executeProcedureWithRetry(
-          procedureFileName,
-          params,
-          folderName,
-          retries - 1,
-        );
-      }
-      console.error(`Failed to execute ${procedureFileName}:`, err);
-      throw err;
-    }
-  }
+  async updateEnergyConsumedHistoricalData(deviceUuid: string): Promise<void> {
+    try {
+      const now = new Date();
+      const dateStr = now.toLocaleDateString('en-CA'); // YYYY-MM-DD
+      const hour = now.getHours();
+      const monthYear = now
+        .toLocaleDateString('en-US', {
+          month: '2-digit',
+          year: 'numeric',
+        })
+        .replace('/', '-'); // MM-YYYY
+      await this.executeProcedure(
+        'fact_hourly_device_energy_consumed_procedure',
+        [deviceUuid, dateStr, hour],
+      );
+      await this.executeProcedure(
+        'fact_daily_device_energy_consumed_procedure',
+        [deviceUuid, dateStr],
+      );
+      await this.executeProcedure(
+        'fact_monthly_device_energy_consumed_procedure',
+        [deviceUuid, monthYear],
+      );
+    } catch (err) {
+      console.error('Failed to insert or update energy data:', err);
+      throw err;
+    }
+  }
+  private async executeProcedure(
+    procedureFileName: string,
+    params: (string | number | null)[],
+  ): Promise<void> {
+    const query = this.loadQuery(
+      'fact_device_energy_consumed',
+      procedureFileName,
+    );
+    await this.dataSource.query(query, params);
+    console.log(`Procedure ${procedureFileName} executed successfully.`);
+  }
   private loadQuery(folderName: string, fileName: string): string {
     return this.sqlLoader.loadQuery(folderName, fileName, SQL_PROCEDURES_PATH);
   }

View File

@@ -16,46 +16,21 @@ export class SosHandlerService {
     );
   }
-  async handleSosEventFirebase(device: any, logData: any): Promise<void> {
-    const sosTrueStatus = [{ code: 'sos', value: true }];
-    const sosFalseStatus = [{ code: 'sos', value: false }];
+  async handleSosEvent(devId: string, logData: any): Promise<void> {
     try {
-      // ✅ Send true status
       await this.deviceStatusFirebaseService.addDeviceStatusToFirebase({
-        deviceTuyaUuid: device.deviceTuyaUuid,
-        status: sosTrueStatus,
+        deviceTuyaUuid: devId,
+        status: [{ code: 'sos', value: true }],
         log: logData,
-        device,
       });
-      await this.deviceStatusFirebaseService.addBatchDeviceStatusToOurDb([
-        {
-          deviceTuyaUuid: device.deviceTuyaUuid,
-          status: sosTrueStatus,
-          log: logData,
-          device,
-        },
-      ]);
-      // ✅ Schedule false status
       setTimeout(async () => {
         try {
           await this.deviceStatusFirebaseService.addDeviceStatusToFirebase({
-            deviceTuyaUuid: device.deviceTuyaUuid,
-            status: sosFalseStatus,
+            deviceTuyaUuid: devId,
+            status: [{ code: 'sos', value: false }],
             log: logData,
-            device,
           });
-          await this.deviceStatusFirebaseService.addBatchDeviceStatusToOurDb([
-            {
-              deviceTuyaUuid: device.deviceTuyaUuid,
-              status: sosFalseStatus,
-              log: logData,
-              device,
-            },
-          ]);
         } catch (err) {
           this.logger.error('Failed to send SOS false value', err);
         }

View File

@@ -1,24 +1,13 @@
-import { Injectable, OnModuleInit } from '@nestjs/common';
+import { Injectable } from '@nestjs/common';
 import TuyaWebsocket from '../../config/tuya-web-socket-config';
 import { ConfigService } from '@nestjs/config';
 import { DeviceStatusFirebaseService } from '@app/common/firebase/devices-status/services/devices-status.service';
 import { SosHandlerService } from './sos.handler.service';
-import * as NodeCache from 'node-cache';
 @Injectable()
-export class TuyaWebSocketService implements OnModuleInit {
+export class TuyaWebSocketService {
   private client: any;
   private readonly isDevEnv: boolean;
-  private readonly deviceCache = new NodeCache({ stdTTL: 7200 }); // TTL = 2 hour
-  private messageQueue: {
-    devId: string;
-    status: any;
-    logData: any;
-    device: any;
-  }[] = [];
-  private isProcessing = false;
   constructor(
     private readonly configService: ConfigService,
@@ -37,36 +26,16 @@ export class TuyaWebSocketService implements OnModuleInit {
     });
     if (this.configService.get<string>('tuya-config.TRUN_ON_TUYA_SOCKET')) {
-      // Set up event handlers
       this.setupEventHandlers();
-      // Start receiving messages
       this.client.start();
     }
-    // Run the queue processor every 15 seconds
-    setInterval(() => this.processQueue(), 15000);
-    // Refresh the cache every 1 hour
-    setInterval(() => this.initializeDeviceCache(), 30 * 60 * 1000); // 30 minutes
-  }
-  async onModuleInit() {
-    await this.initializeDeviceCache();
-  }
-  private async initializeDeviceCache() {
-    try {
-      const allDevices = await this.deviceStatusFirebaseService.getAllDevices();
-      allDevices.forEach((device) => {
-        if (device.deviceTuyaUuid) {
-          this.deviceCache.set(device.deviceTuyaUuid, device);
-        }
-      });
-      console.log(`✅ Refreshed cache with ${allDevices.length} devices.`);
-    } catch (error) {
-      console.error('❌ Failed to initialize device cache:', error);
-    }
   }
   private setupEventHandlers() {
-    // Event handlers
     this.client.open(() => {
       console.log('open');
     });
@@ -74,38 +43,23 @@ export class TuyaWebSocketService implements OnModuleInit {
     this.client.message(async (ws: WebSocket, message: any) => {
       try {
         const { devId, status, logData } = this.extractMessageData(message);
-        if (!Array.isArray(logData?.properties)) {
-          this.client.ackMessage(message.messageId);
-          return;
-        }
-        const device = this.deviceCache.get(devId);
-        if (!device) {
-          // console.log(⛔ Unknown device: ${devId}, message ignored.);
-          this.client.ackMessage(message.messageId);
-          return;
-        }
         if (this.sosHandlerService.isSosTriggered(status)) {
-          await this.sosHandlerService.handleSosEventFirebase(devId, logData);
+          await this.sosHandlerService.handleSosEvent(devId, logData);
         } else {
           await this.deviceStatusFirebaseService.addDeviceStatusToFirebase({
             deviceTuyaUuid: devId,
-            status,
+            status: status,
             log: logData,
-            device,
           });
         }
-        // Push to internal queue
-        this.messageQueue.push({ devId, status, logData, device });
-        // Acknowledge the message
         this.client.ackMessage(message.messageId);
       } catch (error) {
-        console.error('Error receiving message:', error);
+        console.error('Error processing message:', error);
       }
     });
     this.client.reconnect(() => {
       console.log('reconnect');
     });
@@ -126,37 +80,6 @@ export class TuyaWebSocketService implements OnModuleInit {
       console.error('WebSocket error:', error);
     });
   }
-  private async processQueue() {
-    if (this.isProcessing) {
-      console.log('⏳ Skipping: still processing previous batch');
-      return;
-    }
-    if (this.messageQueue.length === 0) return;
-    this.isProcessing = true;
-    const batch = [...this.messageQueue];
-    this.messageQueue = [];
-    console.log(`🔁 Processing batch of size: ${batch.length}`);
-    try {
-      await this.deviceStatusFirebaseService.addBatchDeviceStatusToOurDb(
-        batch.map((item) => ({
-          deviceTuyaUuid: item.devId,
-          status: item.status,
-          log: item.logData,
-          device: item.device,
-        })),
-      );
-    } catch (error) {
-      console.error('❌ Error processing batch:', error);
-      this.messageQueue.unshift(...batch); // retry
-    } finally {
-      this.isProcessing = false;
-    }
-  }
   private extractMessageData(message: any): {
     devId: string;
     status: any;
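The block removed above implemented an in-memory batching pattern: every websocket message was pushed onto a queue and flushed to the database on a 15-second timer, with the batch re-queued on failure. Condensed into a standalone sketch for reference (the field names and interval come from the removed code; the class itself is illustrative, not part of the repository):

// Illustrative condensation of the removed queue logic.
type QueuedStatus = { devId: string; status: any; logData: any; device: any };

class DeviceStatusBatchQueue {
  private queue: QueuedStatus[] = [];
  private isProcessing = false;

  constructor(private readonly flush: (batch: QueuedStatus[]) => Promise<void>) {
    // Drain the queue every 15 seconds, mirroring the removed setInterval.
    setInterval(() => void this.processQueue(), 15000);
  }

  push(item: QueuedStatus): void {
    this.queue.push(item);
  }

  private async processQueue(): Promise<void> {
    if (this.isProcessing || this.queue.length === 0) return;
    this.isProcessing = true;
    const batch = this.queue.splice(0, this.queue.length);
    try {
      await this.flush(batch);
    } catch (error) {
      // Put the failed batch back so the next tick retries it.
      this.queue.unshift(...batch);
    } finally {
      this.isProcessing = false;
    }
  }
}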

View File

@ -1,5 +0,0 @@
// Convert time string (HH:mm) to minutes
export function timeToMinutes(time: string): number {
const [hours, minutes] = time.split(':').map(Number);
return hours * 60 + minutes;
}
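A quick worked example of the helper being deleted here:

timeToMinutes('09:30'); // 9 * 60 + 30 = 570
timeToMinutes('00:45'); // 45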

View File

@@ -49,12 +49,12 @@ export class TuyaService {
       path,
     });
-    // if (!response.success) {
-    //   throw new HttpException(
-    //     `Error fetching device details: ${response.msg}`,
-    //     HttpStatus.BAD_REQUEST,
-    //   );
-    // }
+    if (!response.success) {
+      throw new HttpException(
+        `Error fetching device details: ${response.msg}`,
+        HttpStatus.BAD_REQUEST,
+      );
+    }
     return response.result;
   }
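With the check re-enabled, a failed Tuya response now surfaces to callers as an HttpException with BAD_REQUEST instead of silently returning an unusable result. A hypothetical caller might narrow on that; sketch only (the service method name here is an assumption, not taken from this compare):

import { HttpException, HttpStatus } from '@nestjs/common';

async function fetchDeviceDetailsOrNull(
  tuyaService: { getDeviceDetails(deviceId: string): Promise<any> },
  deviceId: string,
) {
  try {
    return await tuyaService.getDeviceDetails(deviceId);
  } catch (error) {
    // A Tuya-side failure (response.success === false) arrives as BAD_REQUEST.
    if (error instanceof HttpException && error.getStatus() === HttpStatus.BAD_REQUEST) {
      return null;
    }
    throw error;
  }
}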

View File

@@ -1,18 +1,32 @@
 import { utilities as nestWinstonModuleUtilities } from 'nest-winston';
 import * as winston from 'winston';
+const environment = process.env.NODE_ENV || 'local';
 export const winstonLoggerOptions: winston.LoggerOptions = {
   level:
-    process.env.AZURE_POSTGRESQL_DATABASE === 'development' ? 'debug' : 'error',
+    environment === 'local'
+      ? 'debug'
+      : environment === 'development'
+        ? 'warn'
+        : 'error',
   transports: [
     new winston.transports.Console({
+      level:
+        environment === 'local'
+          ? 'debug'
+          : environment === 'development'
+            ? 'warn'
+            : 'error',
       format: winston.format.combine(
         winston.format.timestamp(),
         nestWinstonModuleUtilities.format.nestLike('MyApp', {
-          prettyPrint: true,
+          prettyPrint: environment === 'local',
         }),
       ),
     }),
+    // Only create file logs if NOT local
+    ...(environment !== 'local'
+      ? [
           new winston.transports.File({
             filename: 'logs/error.log',
             level: 'error',
@@ -20,7 +34,10 @@ export const winstonLoggerOptions: winston.LoggerOptions = {
           }),
           new winston.transports.File({
             filename: 'logs/combined.log',
+            level: 'info',
             format: winston.format.json(),
           }),
+        ]
+      : []),
   ],
 };
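The same three-way environment mapping now appears twice (the top-level level and the console transport), so a small helper could express it once. A minimal sketch assuming the same local / development / everything-else semantics as the config above:

// Sketch only; mirrors the ternary chains in the new config.
function levelFor(environment: string): 'debug' | 'warn' | 'error' {
  if (environment === 'local') return 'debug';
  if (environment === 'development') return 'warn';
  return 'error'; // production and anything unrecognised log errors only
}

// levelFor(process.env.NODE_ENV || 'local') -> 'debug' locally, 'warn' in development, 'error' otherwise.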

View File

@@ -1,6 +1,6 @@
 import { RoleType } from '@app/common/constants/role.type.enum';
 import { UserStatusEnum } from '@app/common/constants/user-status.enum';
-import { IsEnum, IsNotEmpty, IsOptional, IsString } from 'class-validator';
+import { IsEnum, IsNotEmpty, IsString } from 'class-validator';
 export class InviteUserDto {
   @IsString()
@@ -12,12 +12,8 @@ export class InviteUserDto {
   public email: string;
   @IsString()
-  @IsOptional()
-  public jobTitle?: string;
-  @IsString()
-  @IsOptional()
-  public companyName?: string;
+  @IsNotEmpty()
+  public jobTitle: string;
   @IsEnum(UserStatusEnum)
   @IsNotEmpty()

View File

@@ -8,14 +8,14 @@ import {
   Unique,
 } from 'typeorm';
-import { RoleType } from '@app/common/constants/role.type.enum';
-import { UserStatusEnum } from '@app/common/constants/user-status.enum';
 import { AbstractEntity } from '../../abstract/entities/abstract.entity';
-import { ProjectEntity } from '../../project/entities';
 import { RoleTypeEntity } from '../../role-type/entities';
-import { SpaceEntity } from '../../space/entities/space.entity';
+import { UserStatusEnum } from '@app/common/constants/user-status.enum';
 import { UserEntity } from '../../user/entities';
+import { RoleType } from '@app/common/constants/role.type.enum';
 import { InviteUserDto, InviteUserSpaceDto } from '../dtos';
+import { ProjectEntity } from '../../project/entities';
+import { SpaceEntity } from '../../space/entities/space.entity';
 @Entity({ name: 'invite-user' })
 @Unique(['email', 'project'])
@@ -37,11 +37,6 @@ export class InviteUserEntity extends AbstractEntity<InviteUserDto> {
   })
   jobTitle: string;
-  @Column({
-    nullable: true,
-  })
-  companyName: string;
   @Column({
     nullable: false,
     enum: Object.values(UserStatusEnum),
@@ -87,10 +82,7 @@ export class InviteUserEntity extends AbstractEntity<InviteUserDto> {
     onDelete: 'CASCADE',
   })
   public roleType: RoleTypeEntity;
-  @OneToOne(() => UserEntity, (user) => user.inviteUser, {
-    nullable: true,
-    onDelete: 'CASCADE',
-  })
+  @OneToOne(() => UserEntity, (user) => user.inviteUser, { nullable: true })
   @JoinColumn({ name: 'user_uuid' })
   user: UserEntity;
   @OneToMany(
@@ -120,9 +112,7 @@ export class InviteUserSpaceEntity extends AbstractEntity<InviteUserSpaceDto> {
   })
   public uuid: string;
-  @ManyToOne(() => InviteUserEntity, (inviteUser) => inviteUser.spaces, {
-    onDelete: 'CASCADE',
-  })
+  @ManyToOne(() => InviteUserEntity, (inviteUser) => inviteUser.spaces)
   @JoinColumn({ name: 'invite_user_uuid' })
   public inviteUser: InviteUserEntity;

View File

@ -1,11 +0,0 @@
import { Module } from '@nestjs/common';
import { TypeOrmModule } from '@nestjs/typeorm';
import { AqiSpaceDailyPollutantStatsEntity } from './entities/aqi.entity';
@Module({
providers: [],
exports: [],
controllers: [],
imports: [TypeOrmModule.forFeature([AqiSpaceDailyPollutantStatsEntity])],
})
export class AqiRepositoryModule {}

View File

@ -1,82 +0,0 @@
import { IsNotEmpty, IsNumber, IsString } from 'class-validator';
export class AqiSpaceDailyPollutantStatsDto {
@IsString()
@IsNotEmpty()
public uuid: string;
@IsNotEmpty()
@IsString()
spaceUuid: string;
@IsNotEmpty()
@IsString()
eventDay: string;
@IsNotEmpty()
@IsNumber()
eventHour: number;
@IsNumber()
pm1Min: number;
@IsNumber()
pm1Avg: number;
@IsNumber()
pm1Max: number;
@IsNumber()
pm10Min: number;
@IsNumber()
pm10Avg: number;
@IsNumber()
pm10Max: number;
@IsNumber()
pm25Min: number;
@IsNumber()
pm25Avg: number;
@IsNumber()
pm25Max: number;
@IsNumber()
ch2oMin: number;
@IsNumber()
ch2oAvg: number;
@IsNumber()
ch2oMax: number;
@IsNumber()
vocMin: number;
@IsNumber()
vocAvg: number;
@IsNumber()
vocMax: number;
@IsNumber()
co2Min: number;
@IsNumber()
co2Avg: number;
@IsNumber()
co2Max: number;
@IsNumber()
aqiMin: number;
@IsNumber()
aqiAvg: number;
@IsNumber()
aqiMax: number;
}

View File

@ -1 +0,0 @@
export * from './aqi.dto';

View File

@ -1,184 +0,0 @@
import { Column, Entity, ManyToOne, Unique } from 'typeorm';
import { AbstractEntity } from '../../abstract/entities/abstract.entity';
import { SpaceEntity } from '../../space/entities/space.entity';
import { AqiSpaceDailyPollutantStatsDto } from '../dtos';
@Entity({ name: 'space-daily-pollutant-stats' })
@Unique(['spaceUuid', 'eventDate'])
export class AqiSpaceDailyPollutantStatsEntity extends AbstractEntity<AqiSpaceDailyPollutantStatsDto> {
@Column({ nullable: false })
public spaceUuid: string;
@ManyToOne(() => SpaceEntity, (space) => space.aqiSensorDaily)
space: SpaceEntity;
@Column({ type: 'date', nullable: false })
public eventDate: Date;
@Column('float', { nullable: true })
public goodAqiPercentage?: number;
@Column('float', { nullable: true })
public moderateAqiPercentage?: number;
@Column('float', { nullable: true })
public unhealthySensitiveAqiPercentage?: number;
@Column('float', { nullable: true })
public unhealthyAqiPercentage?: number;
@Column('float', { nullable: true })
public veryUnhealthyAqiPercentage?: number;
@Column('float', { nullable: true })
public hazardousAqiPercentage?: number;
@Column('float', { nullable: true })
public dailyAvgAqi?: number;
@Column('float', { nullable: true })
public dailyMaxAqi?: number;
@Column('float', { nullable: true })
public dailyMinAqi?: number;
@Column('float', { nullable: true })
public goodPm25Percentage?: number;
@Column('float', { nullable: true })
public moderatePm25Percentage?: number;
@Column('float', { nullable: true })
public unhealthySensitivePm25Percentage?: number;
@Column('float', { nullable: true })
public unhealthyPm25Percentage?: number;
@Column('float', { nullable: true })
public veryUnhealthyPm25Percentage?: number;
@Column('float', { nullable: true })
public hazardousPm25Percentage?: number;
@Column('float', { nullable: true })
public dailyAvgPm25?: number;
@Column('float', { nullable: true })
public dailyMaxPm25?: number;
@Column('float', { nullable: true })
public dailyMinPm25?: number;
@Column('float', { nullable: true })
public goodPm10Percentage?: number;
@Column('float', { nullable: true })
public moderatePm10Percentage?: number;
@Column('float', { nullable: true })
public unhealthySensitivePm10Percentage?: number;
@Column('float', { nullable: true })
public unhealthyPm10Percentage?: number;
@Column('float', { nullable: true })
public veryUnhealthyPm10Percentage?: number;
@Column('float', { nullable: true })
public hazardousPm10Percentage?: number;
@Column('float', { nullable: true })
public dailyAvgPm10?: number;
@Column('float', { nullable: true })
public dailyMaxPm10?: number;
@Column('float', { nullable: true })
public dailyMinPm10?: number;
@Column('float', { nullable: true })
public goodVocPercentage?: number;
@Column('float', { nullable: true })
public moderateVocPercentage?: number;
@Column('float', { nullable: true })
public unhealthySensitiveVocPercentage?: number;
@Column('float', { nullable: true })
public unhealthyVocPercentage?: number;
@Column('float', { nullable: true })
public veryUnhealthyVocPercentage?: number;
@Column('float', { nullable: true })
public hazardousVocPercentage?: number;
@Column('float', { nullable: true })
public dailyAvgVoc?: number;
@Column('float', { nullable: true })
public dailyMaxVoc?: number;
@Column('float', { nullable: true })
public dailyMinVoc?: number;
@Column('float', { nullable: true })
public goodCo2Percentage?: number;
@Column('float', { nullable: true })
public moderateCo2Percentage?: number;
@Column('float', { nullable: true })
public unhealthySensitiveCo2Percentage?: number;
@Column('float', { nullable: true })
public unhealthyCo2Percentage?: number;
@Column('float', { nullable: true })
public veryUnhealthyCo2Percentage?: number;
@Column('float', { nullable: true })
public hazardousCo2Percentage?: number;
@Column('float', { nullable: true })
public dailyAvgCo2?: number;
@Column('float', { nullable: true })
public dailyMaxCo2?: number;
@Column('float', { nullable: true })
public dailyMinCo2?: number;
@Column('float', { nullable: true })
public goodCh2oPercentage?: number;
@Column('float', { nullable: true })
public moderateCh2oPercentage?: number;
@Column('float', { nullable: true })
public unhealthySensitiveCh2oPercentage?: number;
@Column('float', { nullable: true })
public unhealthyCh2oPercentage?: number;
@Column('float', { nullable: true })
public veryUnhealthyCh2oPercentage?: number;
@Column('float', { nullable: true })
public hazardousCh2oPercentage?: number;
@Column('float', { nullable: true })
public dailyAvgCh2o?: number;
@Column('float', { nullable: true })
public dailyMaxCh2o?: number;
@Column('float', { nullable: true })
public dailyMinCh2o?: number;
constructor(partial: Partial<AqiSpaceDailyPollutantStatsEntity>) {
super();
Object.assign(this, partial);
}
}

View File

@ -1 +0,0 @@
export * from './aqi.entity';

View File

@ -1,10 +0,0 @@
import { DataSource, Repository } from 'typeorm';
import { Injectable } from '@nestjs/common';
import { AqiSpaceDailyPollutantStatsEntity } from '../entities';
@Injectable()
export class AqiSpaceDailyPollutantStatsRepository extends Repository<AqiSpaceDailyPollutantStatsEntity> {
constructor(private dataSource: DataSource) {
super(AqiSpaceDailyPollutantStatsEntity, dataSource.createEntityManager());
}
}

View File

@ -1 +0,0 @@
export * from './aqi.repository';

View File

@ -1,12 +0,0 @@
import { Module } from '@nestjs/common';
import { TypeOrmModule } from '@nestjs/typeorm';
import { BookableSpaceEntity } from './entities/bookable-space.entity';
import { BookingEntity } from './entities/booking.entity';
@Module({
providers: [],
exports: [],
controllers: [],
imports: [TypeOrmModule.forFeature([BookableSpaceEntity, BookingEntity])],
})
export class BookingRepositoryModule {}

View File

@ -1,51 +0,0 @@
import { DaysEnum } from '@app/common/constants/days.enum';
import {
Column,
CreateDateColumn,
Entity,
JoinColumn,
OneToOne,
UpdateDateColumn,
} from 'typeorm';
import { AbstractEntity } from '../../abstract/entities/abstract.entity';
import { SpaceEntity } from '../../space/entities/space.entity';
@Entity('bookable-space')
export class BookableSpaceEntity extends AbstractEntity {
@Column({
type: 'uuid',
default: () => 'gen_random_uuid()',
nullable: false,
})
public uuid: string;
@OneToOne(() => SpaceEntity, (space) => space.bookableConfig)
@JoinColumn({ name: 'space_uuid' })
space: SpaceEntity;
@Column({
type: 'enum',
enum: DaysEnum,
array: true,
nullable: false,
})
daysAvailable: DaysEnum[];
@Column({ type: 'time' })
startTime: string;
@Column({ type: 'time' })
endTime: string;
@Column({ type: Boolean, default: true })
active: boolean;
@Column({ type: 'int', default: null })
points?: number;
@CreateDateColumn()
createdAt: Date;
@UpdateDateColumn()
updatedAt: Date;
}

View File

@ -1,44 +0,0 @@
import {
Column,
CreateDateColumn,
Entity,
ManyToOne,
UpdateDateColumn,
} from 'typeorm';
import { AbstractEntity } from '../../abstract/entities/abstract.entity';
import { SpaceEntity } from '../../space/entities/space.entity';
import { UserEntity } from '../../user/entities';
@Entity('booking')
export class BookingEntity extends AbstractEntity {
@Column({
type: 'uuid',
default: () => 'gen_random_uuid()',
nullable: false,
})
public uuid: string;
@ManyToOne(() => SpaceEntity, (space) => space.bookableConfig)
space: SpaceEntity;
@ManyToOne(() => UserEntity, (user) => user.bookings)
user: UserEntity;
@Column({ type: Date, nullable: false })
date: Date;
@Column({ type: 'time' })
startTime: string;
@Column({ type: 'time' })
endTime: string;
@Column({ type: 'int', default: null })
cost?: number;
@CreateDateColumn()
createdAt: Date;
@UpdateDateColumn()
updatedAt: Date;
}

View File

@ -1,10 +0,0 @@
import { DataSource, Repository } from 'typeorm';
import { Injectable } from '@nestjs/common';
import { BookableSpaceEntity } from '../entities/bookable-space.entity';
@Injectable()
export class BookableSpaceEntityRepository extends Repository<BookableSpaceEntity> {
constructor(private dataSource: DataSource) {
super(BookableSpaceEntity, dataSource.createEntityManager());
}
}

View File

@ -1,10 +0,0 @@
import { Injectable } from '@nestjs/common';
import { DataSource, Repository } from 'typeorm';
import { BookingEntity } from '../entities/booking.entity';
@Injectable()
export class BookingEntityRepository extends Repository<BookingEntity> {
constructor(private dataSource: DataSource) {
super(BookingEntity, dataSource.createEntityManager());
}
}

View File

@@ -2,15 +2,15 @@ import { SourceType } from '@app/common/constants/source-type.enum';
 import { Entity, Column, PrimaryColumn, Unique } from 'typeorm';
 @Entity('device-status-log')
-@Unique('event_time_idx', ['eventTime', 'deviceId', 'code', 'value'])
+@Unique('event_time_idx', ['eventTime'])
 export class DeviceStatusLogEntity {
-  @PrimaryColumn({ type: 'int', generated: true, unsigned: true })
+  @Column({ type: 'int', generated: true, unsigned: true })
   id: number;
   @Column({ type: 'text' })
   eventId: string;
-  @Column({ type: 'timestamptz' })
+  @PrimaryColumn({ type: 'timestamptz' })
   eventTime: Date;
   @Column({
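One side of this change keys the table on eventTime alone, the other keeps a generated id and dedupes on the full (eventTime, deviceId, code, value) tuple. With the composite unique index, duplicate readings can be absorbed by an upsert keyed on those columns. A minimal sketch assuming a TypeORM 0.3-style repository and an incoming reading object named log (both assumptions, not code from this compare):

// Sketch: upsert keyed on the composite unique index.
await deviceStatusLogRepository.upsert(
  {
    eventId: log.eventId,
    eventTime: new Date(log.eventTime),
    deviceId: log.deviceId,
    code: log.code,
    value: log.value,
  },
  ['eventTime', 'deviceId', 'code', 'value'], // conflict target = the unique index columns
);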

View File

@@ -1,24 +1,24 @@
 import {
   Column,
   Entity,
-  Index,
-  JoinColumn,
   ManyToOne,
   OneToMany,
   Unique,
+  Index,
+  JoinColumn,
 } from 'typeorm';
 import { AbstractEntity } from '../../abstract/entities/abstract.entity';
-import { PermissionTypeEntity } from '../../permission/entities';
-import { PowerClampHourlyEntity } from '../../power-clamp/entities/power-clamp.entity';
-import { PresenceSensorDailyDeviceEntity } from '../../presence-sensor/entities';
+import { DeviceDto, DeviceUserPermissionDto } from '../dtos/device.dto';
 import { ProductEntity } from '../../product/entities';
+import { UserEntity } from '../../user/entities';
+import { DeviceNotificationDto } from '../dtos';
+import { PermissionTypeEntity } from '../../permission/entities';
 import { SceneDeviceEntity } from '../../scene-device/entities';
 import { SpaceEntity } from '../../space/entities/space.entity';
 import { SubspaceEntity } from '../../space/entities/subspace/subspace.entity';
 import { NewTagEntity } from '../../tag';
-import { UserEntity } from '../../user/entities';
-import { DeviceNotificationDto } from '../dtos';
-import { DeviceDto, DeviceUserPermissionDto } from '../dtos/device.dto';
+import { PowerClampHourlyEntity } from '../../power-clamp/entities/power-clamp.entity';
+import { PresenceSensorDailyDeviceEntity } from '../../presence-sensor/entities';
 @Entity({ name: 'device' })
 @Unique(['deviceTuyaUuid'])
@@ -28,11 +28,6 @@ export class DeviceEntity extends AbstractEntity<DeviceDto> {
   })
   deviceTuyaUuid: string;
-  @Column({
-    nullable: true,
-  })
-  deviceTuyaConstUuid: string;
   @Column({
     nullable: true,
     default: true,
@@ -83,8 +78,8 @@ export class DeviceEntity extends AbstractEntity<DeviceDto> {
   @OneToMany(() => SceneDeviceEntity, (sceneDevice) => sceneDevice.device, {})
   sceneDevices: SceneDeviceEntity[];
-  @ManyToOne(() => NewTagEntity, (tag) => tag.devices)
-  @JoinColumn({ name: 'tag_uuid' })
+  @OneToMany(() => NewTagEntity, (tag) => tag.devices)
+  // @JoinTable({ name: 'device_tags' })
   public tag: NewTagEntity;
   @OneToMany(() => PowerClampHourlyEntity, (powerClamp) => powerClamp.device)
   powerClampHourly: PowerClampHourlyEntity[];
@@ -116,7 +111,6 @@ export class DeviceNotificationEntity extends AbstractEntity<DeviceNotificationD
   @ManyToOne(() => UserEntity, (user) => user.userPermission, {
     nullable: false,
-    onDelete: 'CASCADE',
   })
   user: UserEntity;
@@ -155,7 +149,6 @@ export class DeviceUserPermissionEntity extends AbstractEntity<DeviceUserPermiss
   @ManyToOne(() => UserEntity, (user) => user.userPermission, {
     nullable: false,
-    onDelete: 'CASCADE',
   })
   user: UserEntity;
   constructor(partial: Partial<DeviceUserPermissionEntity>) {

View File

@ -1 +0,0 @@
export * from './occupancy.dto';

View File

@ -1,23 +0,0 @@
import { IsNotEmpty, IsNumber, IsString } from 'class-validator';
export class SpaceDailyOccupancyDurationDto {
@IsString()
@IsNotEmpty()
public uuid: string;
@IsString()
@IsNotEmpty()
public spaceUuid: string;
@IsString()
@IsNotEmpty()
public eventDate: string;
@IsNumber()
@IsNotEmpty()
public occupancyPercentage: number;
@IsNumber()
@IsNotEmpty()
public occupiedSeconds: number;
}

View File

@ -1 +0,0 @@
export * from './occupancy.entity';

View File

@ -1,32 +0,0 @@
import { Column, Entity, ManyToOne, Unique } from 'typeorm';
import { AbstractEntity } from '../../abstract/entities/abstract.entity';
import { SpaceEntity } from '../../space/entities/space.entity';
import { SpaceDailyOccupancyDurationDto } from '../dtos';
@Entity({ name: 'space-daily-occupancy-duration' })
@Unique(['spaceUuid', 'eventDate'])
export class SpaceDailyOccupancyDurationEntity extends AbstractEntity<SpaceDailyOccupancyDurationDto> {
@Column({ nullable: false })
public spaceUuid: string;
@Column({ nullable: false, type: 'date' })
public eventDate: string;
public CountTotalPresenceDetected: number;
@ManyToOne(() => SpaceEntity, (space) => space.presenceSensorDaily)
space: SpaceEntity;
@Column({ type: 'int' })
occupancyPercentage: number;
@Column({ type: 'int', nullable: true })
occupiedSeconds?: number;
@Column({ type: 'int', nullable: true })
deviceCount?: number;
constructor(partial: Partial<SpaceDailyOccupancyDurationEntity>) {
super();
Object.assign(this, partial);
}
}

View File

@ -1,11 +0,0 @@
import { Module } from '@nestjs/common';
import { TypeOrmModule } from '@nestjs/typeorm';
import { SpaceDailyOccupancyDurationEntity } from './entities/occupancy.entity';
@Module({
providers: [],
exports: [],
controllers: [],
imports: [TypeOrmModule.forFeature([SpaceDailyOccupancyDurationEntity])],
})
export class SpaceDailyOccupancyDurationRepositoryModule {}

View File

@ -1 +0,0 @@
export * from './occupancy.repository';

View File

@ -1,10 +0,0 @@
import { DataSource, Repository } from 'typeorm';
import { Injectable } from '@nestjs/common';
import { SpaceDailyOccupancyDurationEntity } from '../entities/occupancy.entity';
@Injectable()
export class SpaceDailyOccupancyDurationEntityRepository extends Repository<SpaceDailyOccupancyDurationEntity> {
constructor(private dataSource: DataSource) {
super(SpaceDailyOccupancyDurationEntity, dataSource.createEntityManager());
}
}

View File

@@ -1,7 +1,10 @@
 import { Column, Entity, OneToMany } from 'typeorm';
+import { ProductDto } from '../dtos';
 import { AbstractEntity } from '../../abstract/entities/abstract.entity';
 import { DeviceEntity } from '../../device/entities';
-import { ProductDto } from '../dtos';
+import { TagModel } from '../../space-model';
+import { TagEntity } from '../../space/entities/tag.entity';
+import { NewTagEntity } from '../../tag/entities';
 @Entity({ name: 'product' })
 export class ProductEntity extends AbstractEntity<ProductDto> {
   @Column({
@@ -25,6 +28,15 @@ export class ProductEntity extends AbstractEntity<ProductDto> {
   })
   public prodType: string;
+  @OneToMany(() => NewTagEntity, (tag) => tag.product, { cascade: true })
+  public newTags: NewTagEntity[];
+  @OneToMany(() => TagModel, (tag) => tag.product)
+  tagModels: TagModel[];
+  @OneToMany(() => TagEntity, (tag) => tag.product)
+  tags: TagEntity[];
   @OneToMany(
     () => DeviceEntity,
     (devicesProductEntity) => devicesProductEntity.productDevice,

View File

@@ -12,7 +12,6 @@ export class RoleTypeEntity extends AbstractEntity<RoleTypeDto> {
     nullable: false,
     enum: Object.values(RoleType),
   })
-  // why is this ts-type string not enum?
   type: string;
   @OneToMany(() => UserEntity, (inviteUser) => inviteUser.roleType, {
     nullable: true,

View File

@ -0,0 +1,21 @@
import { IsNotEmpty, IsString } from 'class-validator';
export class TagModelDto {
@IsString()
@IsNotEmpty()
public uuid: string;
@IsString()
@IsNotEmpty()
public name: string;
@IsString()
@IsNotEmpty()
public productUuid: string;
@IsString()
spaceModelUuid: string;
@IsString()
subspaceModelUuid: string;
}

View File

@@ -1,3 +1,4 @@
-export * from './space-model-product-allocation.entity';
 export * from './space-model.entity';
 export * from './subspace-model';
+export * from './tag-model.entity';
+export * from './space-model-product-allocation.entity';

View File

@@ -1,12 +1,18 @@
-import { Column, Entity, ManyToOne, OneToMany, Unique } from 'typeorm';
-import { AbstractEntity } from '../../abstract/entities/abstract.entity';
+import {
+  Entity,
+  Column,
+  ManyToOne,
+  ManyToMany,
+  JoinTable,
+  OneToMany,
+} from 'typeorm';
+import { SpaceModelEntity } from './space-model.entity';
+import { NewTagEntity } from '../../tag/entities/tag.entity';
 import { ProductEntity } from '../../product/entities/product.entity';
 import { SpaceProductAllocationEntity } from '../../space/entities/space-product-allocation.entity';
-import { NewTagEntity } from '../../tag/entities/tag.entity';
-import { SpaceModelEntity } from './space-model.entity';
+import { AbstractEntity } from '../../abstract/entities/abstract.entity';
 @Entity({ name: 'space_model_product_allocation' })
-@Unique(['spaceModel', 'product', 'tag'])
 export class SpaceModelProductAllocationEntity extends AbstractEntity<SpaceModelProductAllocationEntity> {
   @Column({
     type: 'uuid',
@@ -25,8 +31,9 @@ export class SpaceModelProductAllocationEntity extends AbstractEntity<SpaceModel
   @ManyToOne(() => ProductEntity, { nullable: false, onDelete: 'CASCADE' })
   public product: ProductEntity;
-  @ManyToOne(() => NewTagEntity, { nullable: true, onDelete: 'CASCADE' })
-  public tag: NewTagEntity;
+  @ManyToMany(() => NewTagEntity, { cascade: true, onDelete: 'CASCADE' })
+  @JoinTable({ name: 'space_model_product_tags' })
+  public tags: NewTagEntity[];
   @OneToMany(
     () => SpaceProductAllocationEntity,

View File

@@ -1,10 +1,11 @@
-import { Column, Entity, JoinColumn, ManyToOne, OneToMany } from 'typeorm';
+import { Entity, Column, OneToMany, ManyToOne, JoinColumn } from 'typeorm';
 import { AbstractEntity } from '../../abstract/entities/abstract.entity';
-import { ProjectEntity } from '../../project/entities';
-import { SpaceEntity } from '../../space/entities/space.entity';
 import { SpaceModelDto } from '../dtos';
-import { SpaceModelProductAllocationEntity } from './space-model-product-allocation.entity';
 import { SubspaceModelEntity } from './subspace-model';
+import { ProjectEntity } from '../../project/entities';
+import { TagModel } from './tag-model.entity';
+import { SpaceModelProductAllocationEntity } from './space-model-product-allocation.entity';
+import { SpaceEntity } from '../../space/entities/space.entity';
 @Entity({ name: 'space-model' })
 export class SpaceModelEntity extends AbstractEntity<SpaceModelDto> {
@@ -48,6 +49,9 @@ export class SpaceModelEntity extends AbstractEntity<SpaceModelDto> {
   })
   public spaces: SpaceEntity[];
+  @OneToMany(() => TagModel, (tag) => tag.spaceModel)
+  tags: TagModel[];
   @OneToMany(
     () => SpaceModelProductAllocationEntity,
     (allocation) => allocation.spaceModel,

View File

@@ -1,12 +1,11 @@
-import { AbstractEntity } from '@app/common/modules/abstract/entities/abstract.entity';
+import { Entity, Column, ManyToOne, ManyToMany, JoinTable } from 'typeorm';
+import { SubspaceModelEntity } from './subspace-model.entity';
 import { ProductEntity } from '@app/common/modules/product/entities/product.entity';
 import { NewTagEntity } from '@app/common/modules/tag/entities/tag.entity';
-import { Column, Entity, ManyToOne, Unique } from 'typeorm';
 import { SubspaceModelProductAllocationDto } from '../../dtos/subspace-model/subspace-model-product-allocation.dto';
-import { SubspaceModelEntity } from './subspace-model.entity';
+import { AbstractEntity } from '@app/common/modules/abstract/entities/abstract.entity';
 @Entity({ name: 'subspace_model_product_allocation' })
-@Unique(['subspaceModel', 'product', 'tag'])
 export class SubspaceModelProductAllocationEntity extends AbstractEntity<SubspaceModelProductAllocationDto> {
   @Column({
     type: 'uuid',
@@ -28,8 +27,12 @@ export class SubspaceModelProductAllocationEntity extends AbstractEntity<Subspac
   @ManyToOne(() => ProductEntity, { nullable: false, onDelete: 'CASCADE' })
   public product: ProductEntity;
-  @ManyToOne(() => NewTagEntity, { nullable: true, onDelete: 'CASCADE' })
-  public tag: NewTagEntity;
+  @ManyToMany(() => NewTagEntity, (tag) => tag.subspaceModelAllocations, {
+    cascade: true,
+    onDelete: 'CASCADE',
+  })
+  @JoinTable({ name: 'subspace_model_product_tags' })
+  public tags: NewTagEntity[];
   constructor(partial: Partial<SubspaceModelProductAllocationEntity>) {
     super();

View File

@@ -1,9 +1,10 @@
 import { AbstractEntity } from '@app/common/modules/abstract/entities/abstract.entity';
-import { SubspaceEntity } from '@app/common/modules/space/entities/subspace/subspace.entity';
 import { Column, Entity, ManyToOne, OneToMany } from 'typeorm';
 import { SubSpaceModelDto } from '../../dtos';
 import { SpaceModelEntity } from '../space-model.entity';
+import { TagModel } from '../tag-model.entity';
 import { SubspaceModelProductAllocationEntity } from './subspace-model-product-allocation.entity';
+import { SubspaceEntity } from '@app/common/modules/space/entities/subspace/subspace.entity';
 @Entity({ name: 'subspace-model' })
 export class SubspaceModelEntity extends AbstractEntity<SubSpaceModelDto> {
@@ -40,6 +41,9 @@ export class SubspaceModelEntity extends AbstractEntity<SubSpaceModelDto> {
   })
   public disabled: boolean;
+  @OneToMany(() => TagModel, (tag) => tag.subspaceModel)
+  tags: TagModel[];
   @OneToMany(
     () => SubspaceModelProductAllocationEntity,
     (allocation) => allocation.subspaceModel,

View File

@ -0,0 +1,38 @@
import { Column, Entity, JoinColumn, ManyToOne, OneToMany } from 'typeorm';
import { AbstractEntity } from '../../abstract/entities/abstract.entity';
import { TagModelDto } from '../dtos/tag-model.dto';
import { SpaceModelEntity } from './space-model.entity';
import { SubspaceModelEntity } from './subspace-model';
import { ProductEntity } from '../../product/entities';
import { TagEntity } from '../../space/entities/tag.entity';
@Entity({ name: 'tag_model' })
export class TagModel extends AbstractEntity<TagModelDto> {
@Column({ type: 'varchar', length: 255 })
tag: string;
@ManyToOne(() => ProductEntity, (product) => product.tagModels, {
nullable: false,
})
@JoinColumn({ name: 'product_id' })
product: ProductEntity;
@ManyToOne(() => SpaceModelEntity, (space) => space.tags, { nullable: true })
@JoinColumn({ name: 'space_model_id' })
spaceModel: SpaceModelEntity;
@ManyToOne(() => SubspaceModelEntity, (subspace) => subspace.tags, {
nullable: true,
})
@JoinColumn({ name: 'subspace_model_id' })
subspaceModel: SubspaceModelEntity;
@Column({
nullable: false,
default: false,
})
public disabled: boolean;
@OneToMany(() => TagEntity, (tag) => tag.model)
tags: TagEntity[];
}

View File

@@ -1,10 +1,11 @@
-import { Injectable } from '@nestjs/common';
 import { DataSource, Repository } from 'typeorm';
+import { Injectable } from '@nestjs/common';
 import {
   SpaceModelEntity,
   SpaceModelProductAllocationEntity,
   SubspaceModelEntity,
   SubspaceModelProductAllocationEntity,
+  TagModel,
 } from '../entities';
 @Injectable()
@@ -20,6 +21,13 @@ export class SubspaceModelRepository extends Repository<SubspaceModelEntity> {
   }
 }
+@Injectable()
+export class TagModelRepository extends Repository<TagModel> {
+  constructor(private dataSource: DataSource) {
+    super(TagModel, dataSource.createEntityManager());
+  }
+}
 @Injectable()
 export class SpaceModelProductAllocationRepoitory extends Repository<SpaceModelProductAllocationEntity> {
   constructor(private dataSource: DataSource) {

View File

@@ -1,11 +1,13 @@
-import { Module } from '@nestjs/common';
 import { TypeOrmModule } from '@nestjs/typeorm';
-import { SpaceModelEntity, SubspaceModelEntity } from './entities';
+import { SpaceModelEntity, SubspaceModelEntity, TagModel } from './entities';
+import { Module } from '@nestjs/common';
 @Module({
   providers: [],
   exports: [],
   controllers: [],
-  imports: [TypeOrmModule.forFeature([SpaceModelEntity, SubspaceModelEntity])],
+  imports: [
+    TypeOrmModule.forFeature([SpaceModelEntity, SubspaceModelEntity, TagModel]),
+  ],
 })
 export class SpaceModelRepositoryModule {}

View File

@ -0,0 +1,32 @@
import { Column, Entity, JoinColumn, ManyToOne } from 'typeorm';
import { AbstractEntity } from '../../abstract/entities/abstract.entity';
import { SpaceEntity } from './space.entity';
import { Direction } from '@app/common/constants/direction.enum';
@Entity({ name: 'space-link' })
export class SpaceLinkEntity extends AbstractEntity {
@ManyToOne(() => SpaceEntity, { nullable: false, onDelete: 'CASCADE' })
@JoinColumn({ name: 'start_space_id' })
public startSpace: SpaceEntity;
@ManyToOne(() => SpaceEntity, { nullable: false, onDelete: 'CASCADE' })
@JoinColumn({ name: 'end_space_id' })
public endSpace: SpaceEntity;
@Column({
nullable: false,
default: false,
})
public disabled: boolean;
@Column({
nullable: false,
enum: Object.values(Direction),
})
direction: string;
constructor(partial: Partial<SpaceLinkEntity>) {
super();
Object.assign(this, partial);
}
}

View File

@@ -1,13 +1,12 @@
-import { Column, Entity, ManyToOne, Unique } from 'typeorm';
-import { AbstractEntity } from '../../abstract/entities/abstract.entity';
-import { ProductEntity } from '../../product/entities/product.entity';
-import { SpaceModelProductAllocationEntity } from '../../space-model/entities/space-model-product-allocation.entity';
-import { NewTagEntity } from '../../tag/entities/tag.entity';
-import { SpaceProductAllocationDto } from '../dtos/space-product-allocation.dto';
+import { Entity, Column, ManyToOne, ManyToMany, JoinTable } from 'typeorm';
 import { SpaceEntity } from './space.entity';
+import { SpaceModelProductAllocationEntity } from '../../space-model/entities/space-model-product-allocation.entity';
+import { ProductEntity } from '../../product/entities/product.entity';
+import { NewTagEntity } from '../../tag/entities/tag.entity';
+import { AbstractEntity } from '../../abstract/entities/abstract.entity';
+import { SpaceProductAllocationDto } from '../dtos/space-product-allocation.dto';
 @Entity({ name: 'space_product_allocation' })
-@Unique(['space', 'product', 'tag'], {})
 export class SpaceProductAllocationEntity extends AbstractEntity<SpaceProductAllocationDto> {
   @Column({
     type: 'uuid',
@@ -31,8 +30,9 @@ export class SpaceProductAllocationEntity extends AbstractEntity<SpaceProductAll
   @ManyToOne(() => ProductEntity, { nullable: false, onDelete: 'CASCADE' })
   public product: ProductEntity;
-  @ManyToOne(() => NewTagEntity, { nullable: true, onDelete: 'CASCADE' })
-  public tag: NewTagEntity;
+  @ManyToMany(() => NewTagEntity)
+  @JoinTable({ name: 'space_product_tags' })
+  public tags: NewTagEntity[];
   constructor(partial: Partial<SpaceProductAllocationEntity>) {
     super();

View File

@@ -1,26 +1,16 @@
-import {
-  Column,
-  Entity,
-  JoinColumn,
-  ManyToOne,
-  OneToMany,
-  OneToOne,
-} from 'typeorm';
+import { Column, Entity, JoinColumn, ManyToOne, OneToMany } from 'typeorm';
+import { SpaceDto } from '../dtos';
 import { AbstractEntity } from '../../abstract/entities/abstract.entity';
-import { AqiSpaceDailyPollutantStatsEntity } from '../../aqi/entities';
-import { BookableSpaceEntity } from '../../booking/entities/bookable-space.entity';
-import { CommunityEntity } from '../../community/entities';
+import { UserSpaceEntity } from '../../user/entities';
 import { DeviceEntity } from '../../device/entities';
-import { InviteUserSpaceEntity } from '../../Invite-user/entities';
-import { SpaceDailyOccupancyDurationEntity } from '../../occupancy/entities';
-import { PresenceSensorDailySpaceEntity } from '../../presence-sensor/entities';
+import { CommunityEntity } from '../../community/entities';
+import { SpaceLinkEntity } from './space-link.entity';
 import { SceneEntity } from '../../scene/entities';
 import { SpaceModelEntity } from '../../space-model';
-import { UserSpaceEntity } from '../../user/entities';
-import { SpaceDto } from '../dtos';
+import { InviteUserSpaceEntity } from '../../Invite-user/entities';
 import { SpaceProductAllocationEntity } from './space-product-allocation.entity';
 import { SubspaceEntity } from './subspace/subspace.entity';
-import { BookingEntity } from '../../booking/entities/booking.entity';
+import { PresenceSensorDailySpaceEntity } from '../../presence-sensor/entities';
 @Entity({ name: 'space' })
 export class SpaceEntity extends AbstractEntity<SpaceDto> {
@@ -65,12 +55,6 @@ export class SpaceEntity extends AbstractEntity<SpaceDto> {
   })
   public disabled: boolean;
-  @Column({
-    nullable: true,
-    type: Number,
-  })
-  public order?: number;
   @OneToMany(() => SubspaceEntity, (subspace) => subspace.space, {
     nullable: true,
   })
@@ -89,6 +73,16 @@ export class SpaceEntity extends AbstractEntity<SpaceDto> {
   )
   devices: DeviceEntity[];
+  @OneToMany(() => SpaceLinkEntity, (connection) => connection.startSpace, {
+    nullable: true,
+  })
+  public outgoingConnections: SpaceLinkEntity[];
+  @OneToMany(() => SpaceLinkEntity, (connection) => connection.endSpace, {
+    nullable: true,
+  })
+  public incomingConnections: SpaceLinkEntity[];
   @Column({
     nullable: true,
     type: 'text',
@@ -121,21 +115,6 @@ export class SpaceEntity extends AbstractEntity<SpaceDto> {
   @OneToMany(() => PresenceSensorDailySpaceEntity, (sensor) => sensor.space)
   presenceSensorDaily: PresenceSensorDailySpaceEntity[];
-  @OneToMany(() => AqiSpaceDailyPollutantStatsEntity, (aqi) => aqi.space)
-  aqiSensorDaily: AqiSpaceDailyPollutantStatsEntity[];
-  @OneToMany(
-    () => SpaceDailyOccupancyDurationEntity,
-    (occupancy) => occupancy.space,
-  )
-  occupancyDaily: SpaceDailyOccupancyDurationEntity[];
-  @OneToOne(() => BookableSpaceEntity, (bookable) => bookable.space)
-  bookableConfig: BookableSpaceEntity;
-  @OneToMany(() => BookingEntity, (booking) => booking.space)
-  bookings: BookingEntity[];
   constructor(partial: Partial<SpaceEntity>) {
     super();
     Object.assign(this, partial);

View File

@@ -1,13 +1,20 @@
-import { AbstractEntity } from '@app/common/modules/abstract/entities/abstract.entity';
+import {
+  Entity,
+  Column,
+  ManyToOne,
+  ManyToMany,
+  JoinTable,
+  Unique,
+} from 'typeorm';
+import { SubspaceEntity } from './subspace.entity';
 import { ProductEntity } from '@app/common/modules/product/entities';
 import { SubspaceModelProductAllocationEntity } from '@app/common/modules/space-model';
 import { NewTagEntity } from '@app/common/modules/tag/entities/tag.entity';
-import { Column, Entity, ManyToOne, Unique } from 'typeorm';
+import { AbstractEntity } from '@app/common/modules/abstract/entities/abstract.entity';
 import { SubspaceProductAllocationDto } from '../../dtos/subspace-product-allocation.dto';
-import { SubspaceEntity } from './subspace.entity';
 @Entity({ name: 'subspace_product_allocation' })
-@Unique(['subspace', 'product', 'tag'])
+@Unique(['subspace', 'product'])
 export class SubspaceProductAllocationEntity extends AbstractEntity<SubspaceProductAllocationDto> {
   @Column({
     type: 'uuid',
@@ -31,8 +38,9 @@ export class SubspaceProductAllocationEntity extends AbstractEntity<SubspaceProd
   @ManyToOne(() => ProductEntity, { nullable: false, onDelete: 'CASCADE' })
   public product: ProductEntity;
-  @ManyToOne(() => NewTagEntity, { nullable: true, onDelete: 'CASCADE' })
-  public tag: NewTagEntity;
+  @ManyToMany(() => NewTagEntity)
+  @JoinTable({ name: 'subspace_product_tags' })
+  public tags: NewTagEntity[];
   constructor(partial: Partial<SubspaceProductAllocationEntity>) {
     super();

View File

@@ -4,6 +4,7 @@ import { SubspaceModelEntity } from '@app/common/modules/space-model';
 import { Column, Entity, JoinColumn, ManyToOne, OneToMany } from 'typeorm';
 import { SubspaceDto } from '../../dtos';
 import { SpaceEntity } from '../space.entity';
+import { TagEntity } from '../tag.entity';
 import { SubspaceProductAllocationEntity } from './subspace-product-allocation.entity';
 @Entity({ name: 'subspace' })
@@ -42,6 +43,9 @@ export class SubspaceEntity extends AbstractEntity<SubspaceDto> {
   })
   subSpaceModel?: SubspaceModelEntity;
+  @OneToMany(() => TagEntity, (tag) => tag.subspace)
+  tags: TagEntity[];
   @OneToMany(
     () => SubspaceProductAllocationEntity,
     (allocation) => allocation.subspace,

View File

@ -0,0 +1,41 @@
import { Entity, Column, ManyToOne, JoinColumn, OneToOne } from 'typeorm';
import { AbstractEntity } from '../../abstract/entities/abstract.entity';
import { ProductEntity } from '../../product/entities';
import { TagDto } from '../dtos';
import { TagModel } from '../../space-model/entities/tag-model.entity';
import { DeviceEntity } from '../../device/entities';
import { SubspaceEntity } from './subspace/subspace.entity';
@Entity({ name: 'tag' })
export class TagEntity extends AbstractEntity<TagDto> {
@Column({ type: 'varchar', length: 255, nullable: true })
tag: string;
@ManyToOne(() => TagModel, (model) => model.tags, {
nullable: true,
})
model: TagModel;
@ManyToOne(() => ProductEntity, (product) => product.tags, {
nullable: false,
})
product: ProductEntity;
@ManyToOne(() => SubspaceEntity, (subspace) => subspace.tags, {
nullable: true,
})
@JoinColumn({ name: 'subspace_id' })
subspace: SubspaceEntity;
@Column({
nullable: false,
default: false,
})
public disabled: boolean;
@OneToOne(() => DeviceEntity, (device) => device.tag, {
nullable: true,
})
@JoinColumn({ name: 'device_id' })
device: DeviceEntity;
}

View File

@@ -1,8 +1,10 @@
-import { Injectable } from '@nestjs/common';
 import { DataSource, Repository } from 'typeorm';
+import { Injectable } from '@nestjs/common';
 import { InviteSpaceEntity } from '../entities/invite-space.entity';
-import { SpaceProductAllocationEntity } from '../entities/space-product-allocation.entity';
+import { SpaceLinkEntity } from '../entities/space-link.entity';
 import { SpaceEntity } from '../entities/space.entity';
+import { TagEntity } from '../entities/tag.entity';
+import { SpaceProductAllocationEntity } from '../entities/space-product-allocation.entity';
 @Injectable()
 export class SpaceRepository extends Repository<SpaceEntity> {
@@ -11,6 +13,20 @@ export class SpaceRepository extends Repository<SpaceEntity> {
   }
 }
+@Injectable()
+export class SpaceLinkRepository extends Repository<SpaceLinkEntity> {
+  constructor(private dataSource: DataSource) {
+    super(SpaceLinkEntity, dataSource.createEntityManager());
+  }
+}
+@Injectable()
+export class TagRepository extends Repository<TagEntity> {
+  constructor(private dataSource: DataSource) {
+    super(TagEntity, dataSource.createEntityManager());
+  }
+}
 @Injectable()
 export class InviteSpaceRepository extends Repository<InviteSpaceEntity> {
   constructor(private dataSource: DataSource) {

View File

@@ -6,6 +6,7 @@ import { SpaceProductAllocationEntity } from './entities/space-product-allocatio
 import { SpaceEntity } from './entities/space.entity';
 import { SubspaceProductAllocationEntity } from './entities/subspace/subspace-product-allocation.entity';
 import { SubspaceEntity } from './entities/subspace/subspace.entity';
+import { TagEntity } from './entities/tag.entity';
 @Module({
   providers: [],
@@ -15,6 +16,7 @@ import { SubspaceEntity } from './entities/subspace/subspace.entity';
     TypeOrmModule.forFeature([
       SpaceEntity,
       SubspaceEntity,
+      TagEntity,
       InviteSpaceEntity,
       SpaceProductAllocationEntity,
       SubspaceProductAllocationEntity,

View File

@@ -1,10 +1,11 @@
-import { Column, Entity, ManyToOne, OneToMany, Unique } from 'typeorm';
-import { AbstractEntity } from '../../abstract/entities/abstract.entity';
-import { DeviceEntity } from '../../device/entities/device.entity';
+import { Entity, Column, ManyToOne, Unique, ManyToMany } from 'typeorm';
+import { ProductEntity } from '../../product/entities';
 import { ProjectEntity } from '../../project/entities';
+import { AbstractEntity } from '../../abstract/entities/abstract.entity';
+import { NewTagDto } from '../dtos/tag.dto';
 import { SpaceModelProductAllocationEntity } from '../../space-model/entities/space-model-product-allocation.entity';
 import { SubspaceModelProductAllocationEntity } from '../../space-model/entities/subspace-model/subspace-model-product-allocation.entity';
-import { NewTagDto } from '../dtos/tag.dto';
+import { DeviceEntity } from '../../device/entities/device.entity';
 @Entity({ name: 'new_tag' })
 @Unique(['name', 'project'])
@@ -23,25 +24,31 @@ export class NewTagEntity extends AbstractEntity<NewTagDto> {
   })
   name: string;
+  @ManyToOne(() => ProductEntity, (product) => product.newTags, {
+    nullable: false,
+    onDelete: 'CASCADE',
+  })
+  public product: ProductEntity;
   @ManyToOne(() => ProjectEntity, (project) => project.tags, {
     nullable: false,
     onDelete: 'CASCADE',
   })
   public project: ProjectEntity;
-  @OneToMany(
+  @ManyToMany(
     () => SpaceModelProductAllocationEntity,
-    (allocation) => allocation.tag,
+    (allocation) => allocation.tags,
   )
   public spaceModelAllocations: SpaceModelProductAllocationEntity[];
-  @OneToMany(
+  @ManyToMany(
     () => SubspaceModelProductAllocationEntity,
-    (allocation) => allocation.tag,
+    (allocation) => allocation.tags,
  )
   public subspaceModelAllocations: SubspaceModelProductAllocationEntity[];
-  @OneToMany(() => DeviceEntity, (device) => device.tag)
+  @ManyToOne(() => DeviceEntity, (device) => device.tag)
   public devices: DeviceEntity[];
   constructor(partial: Partial<NewTagEntity>) {

View File

@ -1,4 +1,3 @@
import { defaultProfilePicture } from '@app/common/constants/default.profile.picture';
import { import {
Column, Column,
DeleteDateColumn, DeleteDateColumn,
@ -9,27 +8,27 @@ import {
    OneToOne,
    Unique,
  } from 'typeorm';
- import { OtpType } from '../../../../src/constants/otp-type.enum';
- import { AbstractEntity } from '../../abstract/entities/abstract.entity';
- import { ClientEntity } from '../../client/entities';
- import {
-   DeviceNotificationEntity,
-   DeviceUserPermissionEntity,
- } from '../../device/entities';
- import { InviteUserEntity } from '../../Invite-user/entities';
- import { ProjectEntity } from '../../project/entities';
- import { RegionEntity } from '../../region/entities';
- import { RoleTypeEntity } from '../../role-type/entities';
- import { SpaceEntity } from '../../space/entities/space.entity';
- import { TimeZoneEntity } from '../../timezone/entities';
- import { VisitorPasswordEntity } from '../../visitor-password/entities';
  import {
    UserDto,
    UserNotificationDto,
    UserOtpDto,
    UserSpaceDto,
  } from '../dtos';
- import { BookingEntity } from '../../booking/entities/booking.entity';
+ import { AbstractEntity } from '../../abstract/entities/abstract.entity';
+ import {
+   DeviceNotificationEntity,
+   DeviceUserPermissionEntity,
+ } from '../../device/entities';
+ import { defaultProfilePicture } from '@app/common/constants/default.profile.picture';
+ import { RegionEntity } from '../../region/entities';
+ import { TimeZoneEntity } from '../../timezone/entities';
+ import { OtpType } from '../../../../src/constants/otp-type.enum';
+ import { RoleTypeEntity } from '../../role-type/entities';
+ import { VisitorPasswordEntity } from '../../visitor-password/entities';
+ import { InviteUserEntity } from '../../Invite-user/entities';
+ import { ProjectEntity } from '../../project/entities';
+ import { SpaceEntity } from '../../space/entities/space.entity';
+ import { ClientEntity } from '../../client/entities';
  @Entity({ name: 'user' })
  export class UserEntity extends AbstractEntity<UserDto> {
@ -83,12 +82,6 @@ export class UserEntity extends AbstractEntity<UserDto> {
  })
  public isActive: boolean;
- @Column({
-   nullable: true,
-   type: Number,
- })
- public bookingPoints?: number;
  @Column({ default: false })
  hasAcceptedWebAgreement: boolean;
@ -101,9 +94,7 @@ export class UserEntity extends AbstractEntity<UserDto> {
  @Column({ type: 'timestamp', nullable: true })
  appAgreementAcceptedAt: Date;
- @OneToMany(() => UserSpaceEntity, (userSpace) => userSpace.user, {
-   onDelete: 'CASCADE',
- })
+ @OneToMany(() => UserSpaceEntity, (userSpace) => userSpace.user)
  userSpaces: UserSpaceEntity[];
  @OneToMany(
@ -122,9 +113,6 @@ export class UserEntity extends AbstractEntity<UserDto> {
  )
  deviceUserNotification: DeviceNotificationEntity[];
- @OneToMany(() => BookingEntity, (booking) => booking.user)
- bookings: BookingEntity[];
  @ManyToOne(() => RegionEntity, (region) => region.users, { nullable: true })
  region: RegionEntity;
  @ManyToOne(() => TimeZoneEntity, (timezone) => timezone.users, {
@ -170,7 +158,6 @@ export class UserEntity extends AbstractEntity<UserDto> {
  export class UserNotificationEntity extends AbstractEntity<UserNotificationDto> {
  @ManyToOne(() => UserEntity, (user) => user.roleType, {
    nullable: false,
-   onDelete: 'CASCADE',
  })
  user: UserEntity;
  @Column({
@ -232,10 +219,7 @@ export class UserSpaceEntity extends AbstractEntity<UserSpaceDto> {
  })
  public uuid: string;
- @ManyToOne(() => UserEntity, (user) => user.userSpaces, {
-   nullable: false,
-   onDelete: 'CASCADE',
- })
+ @ManyToOne(() => UserEntity, (user) => user.userSpaces, { nullable: false })
  user: UserEntity;
  @ManyToOne(() => SpaceEntity, (space) => space.userSpaces, {

View File

@ -1,7 +1,7 @@
- import { Column, Entity, Index, JoinColumn, ManyToOne } from 'typeorm';
- import { VisitorPasswordDto } from '../dtos';
+ import { Column, Entity, ManyToOne, JoinColumn, Index } from 'typeorm';
  import { AbstractEntity } from '../../abstract/entities/abstract.entity';
  import { UserEntity } from '../../user/entities/user.entity';
+ import { VisitorPasswordDto } from '../dtos';
  @Entity({ name: 'visitor-password' })
  @Index('IDX_PASSWORD_TUYA_UUID', ['passwordTuyaUuid'])
@ -14,7 +14,6 @@ export class VisitorPasswordEntity extends AbstractEntity<VisitorPasswordDto> {
  @ManyToOne(() => UserEntity, (user) => user.visitorPasswords, {
    nullable: false,
-   onDelete: 'CASCADE',
  })
  @JoinColumn({ name: 'authorizer_uuid' })
  public user: UserEntity;
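
Several hunks above drop onDelete: 'CASCADE' from @ManyToOne relations. For reference, that option only controls the foreign key TypeORM emits: with it, deleting the referenced user also deletes the dependent rows; without it, PostgreSQL's default NO ACTION applies and the delete is rejected while dependents exist. A minimal, hypothetical illustration:

// Hypothetical entity, not part of this diff; shown only to illustrate the option.
import { Entity, PrimaryGeneratedColumn, ManyToOne } from 'typeorm';
import { UserEntity } from '../../user/entities/user.entity';

@Entity({ name: 'example-child' })
export class ExampleChildEntity {
  @PrimaryGeneratedColumn('uuid')
  uuid: string;

  // Generates a FOREIGN KEY ... REFERENCES "user" ... ON DELETE CASCADE constraint,
  // so removing the user also removes these child rows.
  @ManyToOne(() => UserEntity, { nullable: false, onDelete: 'CASCADE' })
  user: UserEntity;
}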

View File

@ -1,39 +0,0 @@
WITH params AS (
SELECT
$1::uuid AS space_uuid,
TO_DATE(NULLIF($2, ''), 'YYYY-MM') AS event_month
)
SELECT
sdp.space_uuid,
sdp.event_date,
sdp.good_aqi_percentage, sdp.moderate_aqi_percentage, sdp.unhealthy_sensitive_aqi_percentage, sdp.unhealthy_aqi_percentage,
sdp.very_unhealthy_aqi_percentage, sdp.hazardous_aqi_percentage,
sdp.daily_avg_aqi, sdp.daily_max_aqi, sdp.daily_min_aqi,
sdp.good_pm25_percentage, sdp.moderate_pm25_percentage, sdp.unhealthy_sensitive_pm25_percentage, sdp.unhealthy_pm25_percentage,
sdp.very_unhealthy_pm25_percentage, sdp.hazardous_pm25_percentage,
sdp.daily_avg_pm25, sdp.daily_max_pm25, sdp.daily_min_pm25,
sdp.good_pm10_percentage, sdp.moderate_pm10_percentage, sdp.unhealthy_sensitive_pm10_percentage, sdp.unhealthy_pm10_percentage,
sdp.very_unhealthy_pm10_percentage, sdp.hazardous_pm10_percentage,
sdp.daily_avg_pm10, sdp.daily_max_pm10, sdp.daily_min_pm10,
sdp.good_voc_percentage, sdp.moderate_voc_percentage, sdp.unhealthy_sensitive_voc_percentage, sdp.unhealthy_voc_percentage,
sdp.very_unhealthy_voc_percentage, sdp.hazardous_voc_percentage,
sdp.daily_avg_voc, sdp.daily_max_voc, sdp.daily_min_voc,
sdp.good_co2_percentage, sdp.moderate_co2_percentage, sdp.unhealthy_sensitive_co2_percentage, sdp.unhealthy_co2_percentage,
sdp.very_unhealthy_co2_percentage, sdp.hazardous_co2_percentage,
sdp.daily_avg_co2, sdp.daily_max_co2, sdp.daily_min_co2,
sdp.good_ch2o_percentage, sdp.moderate_ch2o_percentage, sdp.unhealthy_sensitive_ch2o_percentage, sdp.unhealthy_ch2o_percentage,
sdp.very_unhealthy_ch2o_percentage, sdp.hazardous_ch2o_percentage,
sdp.daily_avg_ch2o, sdp.daily_max_ch2o, sdp.daily_min_ch2o
FROM public."space-daily-pollutant-stats" AS sdp
CROSS JOIN params p
WHERE
(p.space_uuid IS NULL OR sdp.space_uuid = p.space_uuid)
AND (p.event_month IS NULL OR TO_CHAR(sdp.event_date, 'YYYY-MM') = TO_CHAR(p.event_month, 'YYYY-MM'))
ORDER BY sdp.space_uuid, sdp.event_date;
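
For reference, a read query like the one above takes $1 as an optional space UUID and $2 as an optional 'YYYY-MM' month. A minimal sketch of executing it through TypeORM is shown below; the service name, injection style, and SQL file location are assumptions for illustration only, not part of this changeset.

// Minimal sketch: running the monthly pollutant-stats query with positional parameters.
import { readFileSync } from 'fs';
import { join } from 'path';
import { Injectable } from '@nestjs/common';
import { DataSource } from 'typeorm';

@Injectable()
export class SpacePollutantStatsQuery {
  constructor(private readonly dataSource: DataSource) {}

  async fetchMonthlyStats(spaceUuid: string | null, month: string | null) {
    // $1 -> space uuid (nullable), $2 -> 'YYYY-MM' month (nullable)
    const sql = readFileSync(
      join(__dirname, 'queries/space-daily-pollutant-stats.sql'), // assumed location
      'utf8',
    );
    return this.dataSource.query(sql, [spaceUuid, month]);
  }
}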

View File

@ -1,376 +0,0 @@
WITH params AS (
SELECT
TO_DATE(NULLIF($1, ''), 'YYYY-MM-DD') AS event_date
),
-- Query Pipeline Starts Here
device_space AS (
SELECT
device.uuid AS device_id,
device.space_device_uuid AS space_id,
"device-status-log".event_time::timestamp AS event_time,
"device-status-log".code,
"device-status-log".value
FROM device
LEFT JOIN "device-status-log"
ON device.uuid = "device-status-log".device_id
LEFT JOIN product
ON product.uuid = device.product_device_uuid
WHERE product.cat_name = 'hjjcy'
),
average_pollutants AS (
SELECT
event_time::date AS event_date,
date_trunc('hour', event_time) AS event_hour,
space_id,
-- PM1
MIN(CASE WHEN code = 'pm1' THEN value::numeric END) AS pm1_min,
AVG(CASE WHEN code = 'pm1' THEN value::numeric END) AS pm1_avg,
MAX(CASE WHEN code = 'pm1' THEN value::numeric END) AS pm1_max,
-- PM25
MIN(CASE WHEN code = 'pm25_value' THEN value::numeric END) AS pm25_min,
AVG(CASE WHEN code = 'pm25_value' THEN value::numeric END) AS pm25_avg,
MAX(CASE WHEN code = 'pm25_value' THEN value::numeric END) AS pm25_max,
-- PM10
MIN(CASE WHEN code = 'pm10' THEN value::numeric END) AS pm10_min,
AVG(CASE WHEN code = 'pm10' THEN value::numeric END) AS pm10_avg,
MAX(CASE WHEN code = 'pm10' THEN value::numeric END) AS pm10_max,
-- VOC
MIN(CASE WHEN code = 'voc_value' THEN value::numeric END) AS voc_min,
AVG(CASE WHEN code = 'voc_value' THEN value::numeric END) AS voc_avg,
MAX(CASE WHEN code = 'voc_value' THEN value::numeric END) AS voc_max,
-- CH2O
MIN(CASE WHEN code = 'ch2o_value' THEN value::numeric END) AS ch2o_min,
AVG(CASE WHEN code = 'ch2o_value' THEN value::numeric END) AS ch2o_avg,
MAX(CASE WHEN code = 'ch2o_value' THEN value::numeric END) AS ch2o_max,
-- CO2
MIN(CASE WHEN code = 'co2_value' THEN value::numeric END) AS co2_min,
AVG(CASE WHEN code = 'co2_value' THEN value::numeric END) AS co2_avg,
MAX(CASE WHEN code = 'co2_value' THEN value::numeric END) AS co2_max
FROM device_space
GROUP BY space_id, event_hour, event_date
),
filled_pollutants AS (
SELECT
*,
-- AVG
COALESCE(pm25_avg, LAG(pm25_avg) OVER (PARTITION BY space_id ORDER BY event_hour)) AS pm25_avg_f,
COALESCE(pm10_avg, LAG(pm10_avg) OVER (PARTITION BY space_id ORDER BY event_hour)) AS pm10_avg_f,
COALESCE(voc_avg, LAG(voc_avg) OVER (PARTITION BY space_id ORDER BY event_hour)) AS voc_avg_f,
COALESCE(co2_avg, LAG(co2_avg) OVER (PARTITION BY space_id ORDER BY event_hour)) AS co2_avg_f,
COALESCE(ch2o_avg, LAG(ch2o_avg) OVER (PARTITION BY space_id ORDER BY event_hour)) AS ch2o_avg_f,
-- MIN
COALESCE(pm25_min, LAG(pm25_min) OVER (PARTITION BY space_id ORDER BY event_hour)) AS pm25_min_f,
COALESCE(pm10_min, LAG(pm10_min) OVER (PARTITION BY space_id ORDER BY event_hour)) AS pm10_min_f,
COALESCE(voc_min, LAG(voc_min) OVER (PARTITION BY space_id ORDER BY event_hour)) AS voc_min_f,
COALESCE(co2_min, LAG(co2_min) OVER (PARTITION BY space_id ORDER BY event_hour)) AS co2_min_f,
COALESCE(ch2o_min, LAG(ch2o_min) OVER (PARTITION BY space_id ORDER BY event_hour)) AS ch2o_min_f,
-- MAX
COALESCE(pm25_max, LAG(pm25_max) OVER (PARTITION BY space_id ORDER BY event_hour)) AS pm25_max_f,
COALESCE(pm10_max, LAG(pm10_max) OVER (PARTITION BY space_id ORDER BY event_hour)) AS pm10_max_f,
COALESCE(voc_max, LAG(voc_max) OVER (PARTITION BY space_id ORDER BY event_hour)) AS voc_max_f,
COALESCE(co2_max, LAG(co2_max) OVER (PARTITION BY space_id ORDER BY event_hour)) AS co2_max_f,
COALESCE(ch2o_max, LAG(ch2o_max) OVER (PARTITION BY space_id ORDER BY event_hour)) AS ch2o_max_f
FROM average_pollutants
),
hourly_results AS (
SELECT
space_id,
event_date,
event_hour,
pm1_min, pm1_avg, pm1_max,
pm25_min_f, pm25_avg_f, pm25_max_f,
pm10_min_f, pm10_avg_f, pm10_max_f,
voc_min_f, voc_avg_f, voc_max_f,
co2_min_f, co2_avg_f, co2_max_f,
ch2o_min_f, ch2o_avg_f, ch2o_max_f,
GREATEST(
calculate_aqi('pm25', pm25_min_f),
calculate_aqi('pm10', pm10_min_f)
) AS hourly_min_aqi,
GREATEST(
calculate_aqi('pm25', pm25_avg_f),
calculate_aqi('pm10', pm10_avg_f)
) AS hourly_avg_aqi,
GREATEST(
calculate_aqi('pm25', pm25_max_f),
calculate_aqi('pm10', pm10_max_f)
) AS hourly_max_aqi,
classify_aqi(GREATEST(
calculate_aqi('pm25', pm25_avg_f),
calculate_aqi('pm10', pm10_avg_f)
)) AS aqi_category,
classify_aqi(calculate_aqi('pm25',pm25_avg_f)) as pm25_category,
classify_aqi(calculate_aqi('pm10',pm10_avg_f)) as pm10_category,
classify_aqi(calculate_aqi('voc',voc_avg_f)) as voc_category,
classify_aqi(calculate_aqi('co2',co2_avg_f)) as co2_category,
classify_aqi(calculate_aqi('ch2o',ch2o_avg_f)) as ch2o_category
FROM filled_pollutants
),
daily_category_counts AS (
SELECT space_id, event_date, aqi_category AS category, 'aqi' AS pollutant, COUNT(*) AS category_count
FROM hourly_results
GROUP BY space_id, event_date, aqi_category
UNION ALL
SELECT space_id, event_date, pm25_category AS category, 'pm25' AS pollutant, COUNT(*) AS category_count
FROM hourly_results
GROUP BY space_id, event_date, pm25_category
UNION ALL
SELECT space_id, event_date, pm10_category AS category, 'pm10' AS pollutant, COUNT(*) AS category_count
FROM hourly_results
GROUP BY space_id, event_date, pm10_category
UNION ALL
SELECT space_id, event_date, voc_category AS category, 'voc' AS pollutant, COUNT(*) AS category_count
FROM hourly_results
GROUP BY space_id, event_date, voc_category
UNION ALL
SELECT space_id, event_date, co2_category AS category, 'co2' AS pollutant, COUNT(*) AS category_count
FROM hourly_results
GROUP BY space_id, event_date, co2_category
UNION ALL
SELECT space_id, event_date, ch2o_category AS category, 'ch2o' AS pollutant, COUNT(*) AS category_count
FROM hourly_results
GROUP BY space_id, event_date, ch2o_category
),
daily_totals AS (
SELECT
space_id,
event_date,
SUM(category_count) AS total_count
FROM daily_category_counts
where pollutant = 'aqi'
GROUP BY space_id, event_date
),
-- Pivot Categories into Columns
daily_percentages AS (
select
dt.space_id,
dt.event_date,
-- AQI CATEGORIES
ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Good' and dcc.pollutant = 'aqi' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS good_aqi_percentage,
ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Moderate' and dcc.pollutant = 'aqi' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS moderate_aqi_percentage,
ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Unhealthy for Sensitive Groups' and dcc.pollutant = 'aqi' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS unhealthy_sensitive_aqi_percentage,
ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Unhealthy' and dcc.pollutant = 'aqi' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS unhealthy_aqi_percentage,
ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Very Unhealthy' and dcc.pollutant = 'aqi' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS very_unhealthy_aqi_percentage,
ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Hazardous' and dcc.pollutant = 'aqi' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS hazardous_aqi_percentage,
-- PM25 CATEGORIES
ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Good' and dcc.pollutant = 'pm25' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS good_pm25_percentage,
ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Moderate' and dcc.pollutant = 'pm25' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS moderate_pm25_percentage,
ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Unhealthy for Sensitive Groups' and dcc.pollutant = 'pm25' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS unhealthy_sensitive_pm25_percentage,
ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Unhealthy' and dcc.pollutant = 'pm25' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS unhealthy_pm25_percentage,
ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Very Unhealthy' and dcc.pollutant = 'pm25' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS very_unhealthy_pm25_percentage,
ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Hazardous' and dcc.pollutant = 'pm25' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS hazardous_pm25_percentage,
-- PM10 CATEGORIES
ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Good' and dcc.pollutant = 'pm10' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS good_pm10_percentage,
ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Moderate' and dcc.pollutant = 'pm10' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS moderate_pm10_percentage,
ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Unhealthy for Sensitive Groups' and dcc.pollutant = 'pm10' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS unhealthy_sensitive_pm10_percentage,
ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Unhealthy' and dcc.pollutant = 'pm10' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS unhealthy_pm10_percentage,
ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Very Unhealthy' and dcc.pollutant = 'pm10' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS very_unhealthy_pm10_percentage,
ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Hazardous' and dcc.pollutant = 'pm10' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS hazardous_pm10_percentage,
-- VOC CATEGORIES
ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Good' and dcc.pollutant = 'voc' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS good_voc_percentage,
ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Moderate' and dcc.pollutant = 'voc' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS moderate_voc_percentage,
ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Unhealthy for Sensitive Groups' and dcc.pollutant = 'voc' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS unhealthy_sensitive_voc_percentage,
ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Unhealthy' and dcc.pollutant = 'voc' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS unhealthy_voc_percentage,
ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Very Unhealthy' and dcc.pollutant = 'voc' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS very_unhealthy_voc_percentage,
ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Hazardous' and dcc.pollutant = 'voc' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS hazardous_voc_percentage,
-- CO2 CATEGORIES
ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Good' and dcc.pollutant = 'co2' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS good_co2_percentage,
ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Moderate' and dcc.pollutant = 'co2' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS moderate_co2_percentage,
ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Unhealthy for Sensitive Groups' and dcc.pollutant = 'co2' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS unhealthy_sensitive_co2_percentage,
ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Unhealthy' and dcc.pollutant = 'co2' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS unhealthy_co2_percentage,
ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Very Unhealthy' and dcc.pollutant = 'co2' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS very_unhealthy_co2_percentage,
ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Hazardous' and dcc.pollutant = 'co2' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS hazardous_co2_percentage,
-- CH20 CATEGORIES
ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Good' and dcc.pollutant = 'ch2o' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS good_ch2o_percentage,
ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Moderate' and dcc.pollutant = 'ch2o' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS moderate_ch2o_percentage,
ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Unhealthy for Sensitive Groups' and dcc.pollutant = 'ch2o' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS unhealthy_sensitive_ch2o_percentage,
ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Unhealthy' and dcc.pollutant = 'ch2o' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS unhealthy_ch2o_percentage,
ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Very Unhealthy' and dcc.pollutant = 'ch2o' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS very_unhealthy_ch2o_percentage,
ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Hazardous' and dcc.pollutant = 'ch2o' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS hazardous_ch2o_percentage
FROM daily_totals dt
LEFT JOIN daily_category_counts dcc
ON dt.space_id = dcc.space_id AND dt.event_date = dcc.event_date
GROUP BY dt.space_id, dt.event_date, dt.total_count
),
daily_averages AS (
SELECT
space_id,
event_date,
-- AQI
ROUND(AVG(hourly_min_aqi)::numeric, 2) AS daily_min_aqi,
ROUND(AVG(hourly_avg_aqi)::numeric, 2) AS daily_avg_aqi,
ROUND(AVG(hourly_max_aqi)::numeric, 2) AS daily_max_aqi,
-- PM25
ROUND(AVG(pm25_min_f)::numeric, 2) AS daily_min_pm25,
ROUND(AVG(pm25_avg_f)::numeric, 2) AS daily_avg_pm25,
ROUND(AVG(pm25_max_f)::numeric, 2) AS daily_max_pm25,
-- PM10
ROUND(AVG(pm10_min_f)::numeric, 2) AS daily_min_pm10,
ROUND(AVG(pm10_avg_f)::numeric, 2) AS daily_avg_pm10,
ROUND(AVG(pm10_max_f)::numeric, 2) AS daily_max_pm10,
-- VOC
ROUND(AVG(voc_min_f)::numeric, 2) AS daily_min_voc,
ROUND(AVG(voc_avg_f)::numeric, 2) AS daily_avg_voc,
ROUND(AVG(voc_max_f)::numeric, 2) AS daily_max_voc,
-- CO2
ROUND(AVG(co2_min_f)::numeric, 2) AS daily_min_co2,
ROUND(AVG(co2_avg_f)::numeric, 2) AS daily_avg_co2,
ROUND(AVG(co2_max_f)::numeric, 2) AS daily_max_co2,
-- CH2O
ROUND(AVG(ch2o_min_f)::numeric, 2) AS daily_min_ch2o,
ROUND(AVG(ch2o_avg_f)::numeric, 2) AS daily_avg_ch2o,
ROUND(AVG(ch2o_max_f)::numeric, 2) AS daily_max_ch2o
FROM hourly_results
GROUP BY space_id, event_date
),
final_data as(
SELECT
p.space_id,
p.event_date,
p.good_aqi_percentage, p.moderate_aqi_percentage, p.unhealthy_sensitive_aqi_percentage, p.unhealthy_aqi_percentage, p.very_unhealthy_aqi_percentage, p.hazardous_aqi_percentage,
a.daily_avg_aqi,a.daily_max_aqi, a.daily_min_aqi,
p.good_pm25_percentage, p.moderate_pm25_percentage, p.unhealthy_sensitive_pm25_percentage, p.unhealthy_pm25_percentage, p.very_unhealthy_pm25_percentage, p.hazardous_pm25_percentage,
a.daily_avg_pm25,a.daily_max_pm25, a.daily_min_pm25,
p.good_pm10_percentage, p.moderate_pm10_percentage, p.unhealthy_sensitive_pm10_percentage, p.unhealthy_pm10_percentage, p.very_unhealthy_pm10_percentage, p.hazardous_pm10_percentage,
a.daily_avg_pm10, a.daily_max_pm10, a.daily_min_pm10,
p.good_voc_percentage, p.moderate_voc_percentage, p.unhealthy_sensitive_voc_percentage, p.unhealthy_voc_percentage, p.very_unhealthy_voc_percentage, p.hazardous_voc_percentage,
a.daily_avg_voc, a.daily_max_voc, a.daily_min_voc,
p.good_co2_percentage, p.moderate_co2_percentage, p.unhealthy_sensitive_co2_percentage, p.unhealthy_co2_percentage, p.very_unhealthy_co2_percentage, p.hazardous_co2_percentage,
a.daily_avg_co2,a.daily_max_co2, a.daily_min_co2,
p.good_ch2o_percentage, p.moderate_ch2o_percentage, p.unhealthy_sensitive_ch2o_percentage, p.unhealthy_ch2o_percentage, p.very_unhealthy_ch2o_percentage, p.hazardous_ch2o_percentage,
a.daily_avg_ch2o,a.daily_max_ch2o, a.daily_min_ch2o
FROM daily_percentages p
LEFT JOIN daily_averages a
ON p.space_id = a.space_id
AND p.event_date = a.event_date
JOIN params
ON params.event_date = a.event_date
ORDER BY p.space_id, p.event_date)
INSERT INTO public."space-daily-pollutant-stats" (
space_uuid,
event_date,
good_aqi_percentage, moderate_aqi_percentage, unhealthy_sensitive_aqi_percentage, unhealthy_aqi_percentage, very_unhealthy_aqi_percentage, hazardous_aqi_percentage,
daily_avg_aqi, daily_max_aqi, daily_min_aqi,
good_pm25_percentage, moderate_pm25_percentage, unhealthy_sensitive_pm25_percentage, unhealthy_pm25_percentage, very_unhealthy_pm25_percentage, hazardous_pm25_percentage,
daily_avg_pm25, daily_max_pm25, daily_min_pm25,
good_pm10_percentage, moderate_pm10_percentage, unhealthy_sensitive_pm10_percentage, unhealthy_pm10_percentage, very_unhealthy_pm10_percentage, hazardous_pm10_percentage,
daily_avg_pm10, daily_max_pm10, daily_min_pm10,
good_voc_percentage, moderate_voc_percentage, unhealthy_sensitive_voc_percentage, unhealthy_voc_percentage, very_unhealthy_voc_percentage, hazardous_voc_percentage,
daily_avg_voc, daily_max_voc, daily_min_voc,
good_co2_percentage, moderate_co2_percentage, unhealthy_sensitive_co2_percentage, unhealthy_co2_percentage, very_unhealthy_co2_percentage, hazardous_co2_percentage,
daily_avg_co2, daily_max_co2, daily_min_co2,
good_ch2o_percentage, moderate_ch2o_percentage, unhealthy_sensitive_ch2o_percentage, unhealthy_ch2o_percentage, very_unhealthy_ch2o_percentage, hazardous_ch2o_percentage,
daily_avg_ch2o, daily_max_ch2o, daily_min_ch2o
)
SELECT
space_id,
event_date,
good_aqi_percentage, moderate_aqi_percentage, unhealthy_sensitive_aqi_percentage, unhealthy_aqi_percentage, very_unhealthy_aqi_percentage, hazardous_aqi_percentage,
daily_avg_aqi, daily_max_aqi, daily_min_aqi,
good_pm25_percentage, moderate_pm25_percentage, unhealthy_sensitive_pm25_percentage, unhealthy_pm25_percentage, very_unhealthy_pm25_percentage, hazardous_pm25_percentage,
daily_avg_pm25, daily_max_pm25, daily_min_pm25,
good_pm10_percentage, moderate_pm10_percentage, unhealthy_sensitive_pm10_percentage, unhealthy_pm10_percentage, very_unhealthy_pm10_percentage, hazardous_pm10_percentage,
daily_avg_pm10, daily_max_pm10, daily_min_pm10,
good_voc_percentage, moderate_voc_percentage, unhealthy_sensitive_voc_percentage, unhealthy_voc_percentage, very_unhealthy_voc_percentage, hazardous_voc_percentage,
daily_avg_voc, daily_max_voc, daily_min_voc,
good_co2_percentage, moderate_co2_percentage, unhealthy_sensitive_co2_percentage, unhealthy_co2_percentage, very_unhealthy_co2_percentage, hazardous_co2_percentage,
daily_avg_co2, daily_max_co2, daily_min_co2,
good_ch2o_percentage, moderate_ch2o_percentage, unhealthy_sensitive_ch2o_percentage, unhealthy_ch2o_percentage, very_unhealthy_ch2o_percentage, hazardous_ch2o_percentage,
daily_avg_ch2o, daily_max_ch2o, daily_min_ch2o
FROM final_data
ON CONFLICT (space_uuid, event_date) DO UPDATE
SET
good_aqi_percentage = EXCLUDED.good_aqi_percentage,
moderate_aqi_percentage = EXCLUDED.moderate_aqi_percentage,
unhealthy_sensitive_aqi_percentage = EXCLUDED.unhealthy_sensitive_aqi_percentage,
unhealthy_aqi_percentage = EXCLUDED.unhealthy_aqi_percentage,
very_unhealthy_aqi_percentage = EXCLUDED.very_unhealthy_aqi_percentage,
hazardous_aqi_percentage = EXCLUDED.hazardous_aqi_percentage,
daily_avg_aqi = EXCLUDED.daily_avg_aqi,
daily_max_aqi = EXCLUDED.daily_max_aqi,
daily_min_aqi = EXCLUDED.daily_min_aqi,
good_pm25_percentage = EXCLUDED.good_pm25_percentage,
moderate_pm25_percentage = EXCLUDED.moderate_pm25_percentage,
unhealthy_sensitive_pm25_percentage = EXCLUDED.unhealthy_sensitive_pm25_percentage,
unhealthy_pm25_percentage = EXCLUDED.unhealthy_pm25_percentage,
very_unhealthy_pm25_percentage = EXCLUDED.very_unhealthy_pm25_percentage,
hazardous_pm25_percentage = EXCLUDED.hazardous_pm25_percentage,
daily_avg_pm25 = EXCLUDED.daily_avg_pm25,
daily_max_pm25 = EXCLUDED.daily_max_pm25,
daily_min_pm25 = EXCLUDED.daily_min_pm25,
good_pm10_percentage = EXCLUDED.good_pm10_percentage,
moderate_pm10_percentage = EXCLUDED.moderate_pm10_percentage,
unhealthy_sensitive_pm10_percentage = EXCLUDED.unhealthy_sensitive_pm10_percentage,
unhealthy_pm10_percentage = EXCLUDED.unhealthy_pm10_percentage,
very_unhealthy_pm10_percentage = EXCLUDED.very_unhealthy_pm10_percentage,
hazardous_pm10_percentage = EXCLUDED.hazardous_pm10_percentage,
daily_avg_pm10 = EXCLUDED.daily_avg_pm10,
daily_max_pm10 = EXCLUDED.daily_max_pm10,
daily_min_pm10 = EXCLUDED.daily_min_pm10,
good_voc_percentage = EXCLUDED.good_voc_percentage,
moderate_voc_percentage = EXCLUDED.moderate_voc_percentage,
unhealthy_sensitive_voc_percentage = EXCLUDED.unhealthy_sensitive_voc_percentage,
unhealthy_voc_percentage = EXCLUDED.unhealthy_voc_percentage,
very_unhealthy_voc_percentage = EXCLUDED.very_unhealthy_voc_percentage,
hazardous_voc_percentage = EXCLUDED.hazardous_voc_percentage,
daily_avg_voc = EXCLUDED.daily_avg_voc,
daily_max_voc = EXCLUDED.daily_max_voc,
daily_min_voc = EXCLUDED.daily_min_voc,
good_co2_percentage = EXCLUDED.good_co2_percentage,
moderate_co2_percentage = EXCLUDED.moderate_co2_percentage,
unhealthy_sensitive_co2_percentage = EXCLUDED.unhealthy_sensitive_co2_percentage,
unhealthy_co2_percentage = EXCLUDED.unhealthy_co2_percentage,
very_unhealthy_co2_percentage = EXCLUDED.very_unhealthy_co2_percentage,
hazardous_co2_percentage = EXCLUDED.hazardous_co2_percentage,
daily_avg_co2 = EXCLUDED.daily_avg_co2,
daily_max_co2 = EXCLUDED.daily_max_co2,
daily_min_co2 = EXCLUDED.daily_min_co2,
good_ch2o_percentage = EXCLUDED.good_ch2o_percentage,
moderate_ch2o_percentage = EXCLUDED.moderate_ch2o_percentage,
unhealthy_sensitive_ch2o_percentage = EXCLUDED.unhealthy_sensitive_ch2o_percentage,
unhealthy_ch2o_percentage = EXCLUDED.unhealthy_ch2o_percentage,
very_unhealthy_ch2o_percentage = EXCLUDED.very_unhealthy_ch2o_percentage,
hazardous_ch2o_percentage = EXCLUDED.hazardous_ch2o_percentage,
daily_avg_ch2o = EXCLUDED.daily_avg_ch2o,
daily_max_ch2o = EXCLUDED.daily_max_ch2o,
daily_min_ch2o = EXCLUDED.daily_min_ch2o;
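
The pipeline above leans on the calculate_aqi and classify_aqi SQL helpers, which are defined elsewhere in the repository and are not shown in this compare. As a rough illustration of what such a helper typically computes, here is the standard linear-interpolation AQI for PM2.5 in TypeScript; the breakpoints follow the commonly used EPA table and may differ from the actual SQL implementation.

// Illustrative only: linear-interpolation AQI for PM2.5, mirroring what
// calculate_aqi('pm25', value) presumably does. The real function may differ.
type Breakpoint = { cLow: number; cHigh: number; iLow: number; iHigh: number };

const PM25_BREAKPOINTS: Breakpoint[] = [
  { cLow: 0.0, cHigh: 12.0, iLow: 0, iHigh: 50 },       // Good
  { cLow: 12.1, cHigh: 35.4, iLow: 51, iHigh: 100 },    // Moderate
  { cLow: 35.5, cHigh: 55.4, iLow: 101, iHigh: 150 },   // Unhealthy for Sensitive Groups
  { cLow: 55.5, cHigh: 150.4, iLow: 151, iHigh: 200 },  // Unhealthy
  { cLow: 150.5, cHigh: 250.4, iLow: 201, iHigh: 300 }, // Very Unhealthy
  { cLow: 250.5, cHigh: 500.4, iLow: 301, iHigh: 500 }, // Hazardous
];

export function calculatePm25Aqi(concentration: number): number {
  // Pick the breakpoint band containing the concentration; clamp to the top band.
  const bp =
    PM25_BREAKPOINTS.find((b) => concentration >= b.cLow && concentration <= b.cHigh) ??
    PM25_BREAKPOINTS[PM25_BREAKPOINTS.length - 1];
  // Standard linear interpolation within the matching band.
  const aqi =
    ((bp.iHigh - bp.iLow) / (bp.cHigh - bp.cLow)) * (concentration - bp.cLow) + bp.iLow;
  return Math.round(aqi);
}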

View File

@ -1,367 +0,0 @@
-- Query Pipeline Starts Here
WITH device_space AS (
SELECT
device.uuid AS device_id,
device.space_device_uuid AS space_id,
"device-status-log".event_time::timestamp AS event_time,
"device-status-log".code,
"device-status-log".value
FROM device
LEFT JOIN "device-status-log"
ON device.uuid = "device-status-log".device_id
LEFT JOIN product
ON product.uuid = device.product_device_uuid
WHERE product.cat_name = 'hjjcy'
),
average_pollutants AS (
SELECT
event_time::date AS event_date,
date_trunc('hour', event_time) AS event_hour,
space_id,
-- PM1
MIN(CASE WHEN code = 'pm1' THEN value::numeric END) AS pm1_min,
AVG(CASE WHEN code = 'pm1' THEN value::numeric END) AS pm1_avg,
MAX(CASE WHEN code = 'pm1' THEN value::numeric END) AS pm1_max,
-- PM25
MIN(CASE WHEN code = 'pm25_value' THEN value::numeric END) AS pm25_min,
AVG(CASE WHEN code = 'pm25_value' THEN value::numeric END) AS pm25_avg,
MAX(CASE WHEN code = 'pm25_value' THEN value::numeric END) AS pm25_max,
-- PM10
MIN(CASE WHEN code = 'pm10' THEN value::numeric END) AS pm10_min,
AVG(CASE WHEN code = 'pm10' THEN value::numeric END) AS pm10_avg,
MAX(CASE WHEN code = 'pm10' THEN value::numeric END) AS pm10_max,
-- VOC
MIN(CASE WHEN code = 'voc_value' THEN value::numeric END) AS voc_min,
AVG(CASE WHEN code = 'voc_value' THEN value::numeric END) AS voc_avg,
MAX(CASE WHEN code = 'voc_value' THEN value::numeric END) AS voc_max,
-- CH2O
MIN(CASE WHEN code = 'ch2o_value' THEN value::numeric END) AS ch2o_min,
AVG(CASE WHEN code = 'ch2o_value' THEN value::numeric END) AS ch2o_avg,
MAX(CASE WHEN code = 'ch2o_value' THEN value::numeric END) AS ch2o_max,
-- CO2
MIN(CASE WHEN code = 'co2_value' THEN value::numeric END) AS co2_min,
AVG(CASE WHEN code = 'co2_value' THEN value::numeric END) AS co2_avg,
MAX(CASE WHEN code = 'co2_value' THEN value::numeric END) AS co2_max
FROM device_space
GROUP BY space_id, event_hour, event_date
),
filled_pollutants AS (
SELECT
*,
-- AVG
COALESCE(pm25_avg, LAG(pm25_avg) OVER (PARTITION BY space_id ORDER BY event_hour)) AS pm25_avg_f,
COALESCE(pm10_avg, LAG(pm10_avg) OVER (PARTITION BY space_id ORDER BY event_hour)) AS pm10_avg_f,
COALESCE(voc_avg, LAG(voc_avg) OVER (PARTITION BY space_id ORDER BY event_hour)) AS voc_avg_f,
COALESCE(co2_avg, LAG(co2_avg) OVER (PARTITION BY space_id ORDER BY event_hour)) AS co2_avg_f,
COALESCE(ch2o_avg, LAG(ch2o_avg) OVER (PARTITION BY space_id ORDER BY event_hour)) AS ch2o_avg_f,
-- MIN
COALESCE(pm25_min, LAG(pm25_min) OVER (PARTITION BY space_id ORDER BY event_hour)) AS pm25_min_f,
COALESCE(pm10_min, LAG(pm10_min) OVER (PARTITION BY space_id ORDER BY event_hour)) AS pm10_min_f,
COALESCE(voc_min, LAG(voc_min) OVER (PARTITION BY space_id ORDER BY event_hour)) AS voc_min_f,
COALESCE(co2_min, LAG(co2_min) OVER (PARTITION BY space_id ORDER BY event_hour)) AS co2_min_f,
COALESCE(ch2o_min, LAG(ch2o_min) OVER (PARTITION BY space_id ORDER BY event_hour)) AS ch2o_min_f,
-- MAX
COALESCE(pm25_max, LAG(pm25_max) OVER (PARTITION BY space_id ORDER BY event_hour)) AS pm25_max_f,
COALESCE(pm10_max, LAG(pm10_max) OVER (PARTITION BY space_id ORDER BY event_hour)) AS pm10_max_f,
COALESCE(voc_max, LAG(voc_max) OVER (PARTITION BY space_id ORDER BY event_hour)) AS voc_max_f,
COALESCE(co2_max, LAG(co2_max) OVER (PARTITION BY space_id ORDER BY event_hour)) AS co2_max_f,
COALESCE(ch2o_max, LAG(ch2o_max) OVER (PARTITION BY space_id ORDER BY event_hour)) AS ch2o_max_f
FROM average_pollutants
),
hourly_results AS (
SELECT
space_id,
event_date,
event_hour,
pm1_min, pm1_avg, pm1_max,
pm25_min_f, pm25_avg_f, pm25_max_f,
pm10_min_f, pm10_avg_f, pm10_max_f,
voc_min_f, voc_avg_f, voc_max_f,
co2_min_f, co2_avg_f, co2_max_f,
ch2o_min_f, ch2o_avg_f, ch2o_max_f,
GREATEST(
calculate_aqi('pm25', pm25_min_f),
calculate_aqi('pm10', pm10_min_f)
) AS hourly_min_aqi,
GREATEST(
calculate_aqi('pm25', pm25_avg_f),
calculate_aqi('pm10', pm10_avg_f)
) AS hourly_avg_aqi,
GREATEST(
calculate_aqi('pm25', pm25_max_f),
calculate_aqi('pm10', pm10_max_f)
) AS hourly_max_aqi,
classify_aqi(GREATEST(
calculate_aqi('pm25', pm25_avg_f),
calculate_aqi('pm10', pm10_avg_f)
)) AS aqi_category,
classify_aqi(calculate_aqi('pm25',pm25_avg_f)) as pm25_category,
classify_aqi(calculate_aqi('pm10',pm10_avg_f)) as pm10_category,
classify_aqi(calculate_aqi('voc',voc_avg_f)) as voc_category,
classify_aqi(calculate_aqi('co2',co2_avg_f)) as co2_category,
classify_aqi(calculate_aqi('ch2o',ch2o_avg_f)) as ch2o_category
FROM filled_pollutants
),
daily_category_counts AS (
SELECT space_id, event_date, aqi_category AS category, 'aqi' AS pollutant, COUNT(*) AS category_count
FROM hourly_results
GROUP BY space_id, event_date, aqi_category
UNION ALL
SELECT space_id, event_date, pm25_category AS category, 'pm25' AS pollutant, COUNT(*) AS category_count
FROM hourly_results
GROUP BY space_id, event_date, pm25_category
UNION ALL
SELECT space_id, event_date, pm10_category AS category, 'pm10' AS pollutant, COUNT(*) AS category_count
FROM hourly_results
GROUP BY space_id, event_date, pm10_category
UNION ALL
SELECT space_id, event_date, voc_category AS category, 'voc' AS pollutant, COUNT(*) AS category_count
FROM hourly_results
GROUP BY space_id, event_date, voc_category
UNION ALL
SELECT space_id, event_date, co2_category AS category, 'co2' AS pollutant, COUNT(*) AS category_count
FROM hourly_results
GROUP BY space_id, event_date, co2_category
UNION ALL
SELECT space_id, event_date, ch2o_category AS category, 'ch2o' AS pollutant, COUNT(*) AS category_count
FROM hourly_results
GROUP BY space_id, event_date, ch2o_category
),
daily_totals AS (
SELECT
space_id,
event_date,
SUM(category_count) AS total_count
FROM daily_category_counts
where pollutant = 'aqi'
GROUP BY space_id, event_date
),
-- Pivot Categories into Columns
daily_percentages AS (
select
dt.space_id,
dt.event_date,
-- AQI CATEGORIES
ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Good' and dcc.pollutant = 'aqi' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS good_aqi_percentage,
ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Moderate' and dcc.pollutant = 'aqi' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS moderate_aqi_percentage,
ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Unhealthy for Sensitive Groups' and dcc.pollutant = 'aqi' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS unhealthy_sensitive_aqi_percentage,
ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Unhealthy' and dcc.pollutant = 'aqi' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS unhealthy_aqi_percentage,
ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Very Unhealthy' and dcc.pollutant = 'aqi' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS very_unhealthy_aqi_percentage,
ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Hazardous' and dcc.pollutant = 'aqi' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS hazardous_aqi_percentage,
-- PM25 CATEGORIES
ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Good' and dcc.pollutant = 'pm25' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS good_pm25_percentage,
ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Moderate' and dcc.pollutant = 'pm25' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS moderate_pm25_percentage,
ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Unhealthy for Sensitive Groups' and dcc.pollutant = 'pm25' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS unhealthy_sensitive_pm25_percentage,
ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Unhealthy' and dcc.pollutant = 'pm25' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS unhealthy_pm25_percentage,
ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Very Unhealthy' and dcc.pollutant = 'pm25' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS very_unhealthy_pm25_percentage,
ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Hazardous' and dcc.pollutant = 'pm25' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS hazardous_pm25_percentage,
-- PM10 CATEGORIES
ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Good' and dcc.pollutant = 'pm10' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS good_pm10_percentage,
ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Moderate' and dcc.pollutant = 'pm10' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS moderate_pm10_percentage,
ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Unhealthy for Sensitive Groups' and dcc.pollutant = 'pm10' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS unhealthy_sensitive_pm10_percentage,
ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Unhealthy' and dcc.pollutant = 'pm10' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS unhealthy_pm10_percentage,
ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Very Unhealthy' and dcc.pollutant = 'pm10' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS very_unhealthy_pm10_percentage,
ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Hazardous' and dcc.pollutant = 'pm10' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS hazardous_pm10_percentage,
-- VOC CATEGORIES
ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Good' and dcc.pollutant = 'voc' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS good_voc_percentage,
ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Moderate' and dcc.pollutant = 'voc' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS moderate_voc_percentage,
ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Unhealthy for Sensitive Groups' and dcc.pollutant = 'voc' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS unhealthy_sensitive_voc_percentage,
ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Unhealthy' and dcc.pollutant = 'voc' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS unhealthy_voc_percentage,
ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Very Unhealthy' and dcc.pollutant = 'voc' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS very_unhealthy_voc_percentage,
ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Hazardous' and dcc.pollutant = 'voc' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS hazardous_voc_percentage,
-- CO2 CATEGORIES
ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Good' and dcc.pollutant = 'co2' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS good_co2_percentage,
ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Moderate' and dcc.pollutant = 'co2' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS moderate_co2_percentage,
ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Unhealthy for Sensitive Groups' and dcc.pollutant = 'co2' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS unhealthy_sensitive_co2_percentage,
ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Unhealthy' and dcc.pollutant = 'co2' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS unhealthy_co2_percentage,
ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Very Unhealthy' and dcc.pollutant = 'co2' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS very_unhealthy_co2_percentage,
ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Hazardous' and dcc.pollutant = 'co2' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS hazardous_co2_percentage,
-- CH20 CATEGORIES
ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Good' and dcc.pollutant = 'ch2o' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS good_ch2o_percentage,
ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Moderate' and dcc.pollutant = 'ch2o' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS moderate_ch2o_percentage,
ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Unhealthy for Sensitive Groups' and dcc.pollutant = 'ch2o' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS unhealthy_sensitive_ch2o_percentage,
ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Unhealthy' and dcc.pollutant = 'ch2o' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS unhealthy_ch2o_percentage,
ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Very Unhealthy' and dcc.pollutant = 'ch2o' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS very_unhealthy_ch2o_percentage,
ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Hazardous' and dcc.pollutant = 'ch2o' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS hazardous_ch2o_percentage
FROM daily_totals dt
LEFT JOIN daily_category_counts dcc
ON dt.space_id = dcc.space_id AND dt.event_date = dcc.event_date
GROUP BY dt.space_id, dt.event_date, dt.total_count
),
daily_averages AS (
SELECT
space_id,
event_date,
-- AQI
ROUND(AVG(hourly_min_aqi)::numeric, 2) AS daily_min_aqi,
ROUND(AVG(hourly_avg_aqi)::numeric, 2) AS daily_avg_aqi,
ROUND(AVG(hourly_max_aqi)::numeric, 2) AS daily_max_aqi,
-- PM25
ROUND(AVG(pm25_min_f)::numeric, 2) AS daily_min_pm25,
ROUND(AVG(pm25_avg_f)::numeric, 2) AS daily_avg_pm25,
ROUND(AVG(pm25_max_f)::numeric, 2) AS daily_max_pm25,
-- PM10
ROUND(AVG(pm10_min_f)::numeric, 2) AS daily_min_pm10,
ROUND(AVG(pm10_avg_f)::numeric, 2) AS daily_avg_pm10,
ROUND(AVG(pm10_max_f)::numeric, 2) AS daily_max_pm10,
-- VOC
ROUND(AVG(voc_min_f)::numeric, 2) AS daily_min_voc,
ROUND(AVG(voc_avg_f)::numeric, 2) AS daily_avg_voc,
ROUND(AVG(voc_max_f)::numeric, 2) AS daily_max_voc,
-- CO2
ROUND(AVG(co2_min_f)::numeric, 2) AS daily_min_co2,
ROUND(AVG(co2_avg_f)::numeric, 2) AS daily_avg_co2,
ROUND(AVG(co2_max_f)::numeric, 2) AS daily_max_co2,
-- CH2O
ROUND(AVG(ch2o_min_f)::numeric, 2) AS daily_min_ch2o,
ROUND(AVG(ch2o_avg_f)::numeric, 2) AS daily_avg_ch2o,
ROUND(AVG(ch2o_max_f)::numeric, 2) AS daily_max_ch2o
FROM hourly_results
GROUP BY space_id, event_date
),
final_data as(
SELECT
p.space_id,
p.event_date,
p.good_aqi_percentage, p.moderate_aqi_percentage, p.unhealthy_sensitive_aqi_percentage, p.unhealthy_aqi_percentage, p.very_unhealthy_aqi_percentage, p.hazardous_aqi_percentage,
a.daily_avg_aqi,a.daily_max_aqi, a.daily_min_aqi,
p.good_pm25_percentage, p.moderate_pm25_percentage, p.unhealthy_sensitive_pm25_percentage, p.unhealthy_pm25_percentage, p.very_unhealthy_pm25_percentage, p.hazardous_pm25_percentage,
a.daily_avg_pm25,a.daily_max_pm25, a.daily_min_pm25,
p.good_pm10_percentage, p.moderate_pm10_percentage, p.unhealthy_sensitive_pm10_percentage, p.unhealthy_pm10_percentage, p.very_unhealthy_pm10_percentage, p.hazardous_pm10_percentage,
a.daily_avg_pm10, a.daily_max_pm10, a.daily_min_pm10,
p.good_voc_percentage, p.moderate_voc_percentage, p.unhealthy_sensitive_voc_percentage, p.unhealthy_voc_percentage, p.very_unhealthy_voc_percentage, p.hazardous_voc_percentage,
a.daily_avg_voc, a.daily_max_voc, a.daily_min_voc,
p.good_co2_percentage, p.moderate_co2_percentage, p.unhealthy_sensitive_co2_percentage, p.unhealthy_co2_percentage, p.very_unhealthy_co2_percentage, p.hazardous_co2_percentage,
a.daily_avg_co2,a.daily_max_co2, a.daily_min_co2,
p.good_ch2o_percentage, p.moderate_ch2o_percentage, p.unhealthy_sensitive_ch2o_percentage, p.unhealthy_ch2o_percentage, p.very_unhealthy_ch2o_percentage, p.hazardous_ch2o_percentage,
a.daily_avg_ch2o,a.daily_max_ch2o, a.daily_min_ch2o
FROM daily_percentages p
LEFT JOIN daily_averages a
ON p.space_id = a.space_id AND p.event_date = a.event_date
ORDER BY p.space_id, p.event_date)
INSERT INTO public."space-daily-pollutant-stats" (
space_uuid,
event_date,
good_aqi_percentage, moderate_aqi_percentage, unhealthy_sensitive_aqi_percentage, unhealthy_aqi_percentage, very_unhealthy_aqi_percentage, hazardous_aqi_percentage,
daily_avg_aqi, daily_max_aqi, daily_min_aqi,
good_pm25_percentage, moderate_pm25_percentage, unhealthy_sensitive_pm25_percentage, unhealthy_pm25_percentage, very_unhealthy_pm25_percentage, hazardous_pm25_percentage,
daily_avg_pm25, daily_max_pm25, daily_min_pm25,
good_pm10_percentage, moderate_pm10_percentage, unhealthy_sensitive_pm10_percentage, unhealthy_pm10_percentage, very_unhealthy_pm10_percentage, hazardous_pm10_percentage,
daily_avg_pm10, daily_max_pm10, daily_min_pm10,
good_voc_percentage, moderate_voc_percentage, unhealthy_sensitive_voc_percentage, unhealthy_voc_percentage, very_unhealthy_voc_percentage, hazardous_voc_percentage,
daily_avg_voc, daily_max_voc, daily_min_voc,
good_co2_percentage, moderate_co2_percentage, unhealthy_sensitive_co2_percentage, unhealthy_co2_percentage, very_unhealthy_co2_percentage, hazardous_co2_percentage,
daily_avg_co2, daily_max_co2, daily_min_co2,
good_ch2o_percentage, moderate_ch2o_percentage, unhealthy_sensitive_ch2o_percentage, unhealthy_ch2o_percentage, very_unhealthy_ch2o_percentage, hazardous_ch2o_percentage,
daily_avg_ch2o, daily_max_ch2o, daily_min_ch2o
)
SELECT
space_id,
event_date,
good_aqi_percentage, moderate_aqi_percentage, unhealthy_sensitive_aqi_percentage, unhealthy_aqi_percentage, very_unhealthy_aqi_percentage, hazardous_aqi_percentage,
daily_avg_aqi, daily_max_aqi, daily_min_aqi,
good_pm25_percentage, moderate_pm25_percentage, unhealthy_sensitive_pm25_percentage, unhealthy_pm25_percentage, very_unhealthy_pm25_percentage, hazardous_pm25_percentage,
daily_avg_pm25, daily_max_pm25, daily_min_pm25,
good_pm10_percentage, moderate_pm10_percentage, unhealthy_sensitive_pm10_percentage, unhealthy_pm10_percentage, very_unhealthy_pm10_percentage, hazardous_pm10_percentage,
daily_avg_pm10, daily_max_pm10, daily_min_pm10,
good_voc_percentage, moderate_voc_percentage, unhealthy_sensitive_voc_percentage, unhealthy_voc_percentage, very_unhealthy_voc_percentage, hazardous_voc_percentage,
daily_avg_voc, daily_max_voc, daily_min_voc,
good_co2_percentage, moderate_co2_percentage, unhealthy_sensitive_co2_percentage, unhealthy_co2_percentage, very_unhealthy_co2_percentage, hazardous_co2_percentage,
daily_avg_co2, daily_max_co2, daily_min_co2,
good_ch2o_percentage, moderate_ch2o_percentage, unhealthy_sensitive_ch2o_percentage, unhealthy_ch2o_percentage, very_unhealthy_ch2o_percentage, hazardous_ch2o_percentage,
daily_avg_ch2o, daily_max_ch2o, daily_min_ch2o
FROM final_data
ON CONFLICT (space_uuid, event_date) DO UPDATE
SET
good_aqi_percentage = EXCLUDED.good_aqi_percentage,
moderate_aqi_percentage = EXCLUDED.moderate_aqi_percentage,
unhealthy_sensitive_aqi_percentage = EXCLUDED.unhealthy_sensitive_aqi_percentage,
unhealthy_aqi_percentage = EXCLUDED.unhealthy_aqi_percentage,
very_unhealthy_aqi_percentage = EXCLUDED.very_unhealthy_aqi_percentage,
hazardous_aqi_percentage = EXCLUDED.hazardous_aqi_percentage,
daily_avg_aqi = EXCLUDED.daily_avg_aqi,
daily_max_aqi = EXCLUDED.daily_max_aqi,
daily_min_aqi = EXCLUDED.daily_min_aqi,
good_pm25_percentage = EXCLUDED.good_pm25_percentage,
moderate_pm25_percentage = EXCLUDED.moderate_pm25_percentage,
unhealthy_sensitive_pm25_percentage = EXCLUDED.unhealthy_sensitive_pm25_percentage,
unhealthy_pm25_percentage = EXCLUDED.unhealthy_pm25_percentage,
very_unhealthy_pm25_percentage = EXCLUDED.very_unhealthy_pm25_percentage,
hazardous_pm25_percentage = EXCLUDED.hazardous_pm25_percentage,
daily_avg_pm25 = EXCLUDED.daily_avg_pm25,
daily_max_pm25 = EXCLUDED.daily_max_pm25,
daily_min_pm25 = EXCLUDED.daily_min_pm25,
good_pm10_percentage = EXCLUDED.good_pm10_percentage,
moderate_pm10_percentage = EXCLUDED.moderate_pm10_percentage,
unhealthy_sensitive_pm10_percentage = EXCLUDED.unhealthy_sensitive_pm10_percentage,
unhealthy_pm10_percentage = EXCLUDED.unhealthy_pm10_percentage,
very_unhealthy_pm10_percentage = EXCLUDED.very_unhealthy_pm10_percentage,
hazardous_pm10_percentage = EXCLUDED.hazardous_pm10_percentage,
daily_avg_pm10 = EXCLUDED.daily_avg_pm10,
daily_max_pm10 = EXCLUDED.daily_max_pm10,
daily_min_pm10 = EXCLUDED.daily_min_pm10,
good_voc_percentage = EXCLUDED.good_voc_percentage,
moderate_voc_percentage = EXCLUDED.moderate_voc_percentage,
unhealthy_sensitive_voc_percentage = EXCLUDED.unhealthy_sensitive_voc_percentage,
unhealthy_voc_percentage = EXCLUDED.unhealthy_voc_percentage,
very_unhealthy_voc_percentage = EXCLUDED.very_unhealthy_voc_percentage,
hazardous_voc_percentage = EXCLUDED.hazardous_voc_percentage,
daily_avg_voc = EXCLUDED.daily_avg_voc,
daily_max_voc = EXCLUDED.daily_max_voc,
daily_min_voc = EXCLUDED.daily_min_voc,
good_co2_percentage = EXCLUDED.good_co2_percentage,
moderate_co2_percentage = EXCLUDED.moderate_co2_percentage,
unhealthy_sensitive_co2_percentage = EXCLUDED.unhealthy_sensitive_co2_percentage,
unhealthy_co2_percentage = EXCLUDED.unhealthy_co2_percentage,
very_unhealthy_co2_percentage = EXCLUDED.very_unhealthy_co2_percentage,
hazardous_co2_percentage = EXCLUDED.hazardous_co2_percentage,
daily_avg_co2 = EXCLUDED.daily_avg_co2,
daily_max_co2 = EXCLUDED.daily_max_co2,
daily_min_co2 = EXCLUDED.daily_min_co2,
good_ch2o_percentage = EXCLUDED.good_ch2o_percentage,
moderate_ch2o_percentage = EXCLUDED.moderate_ch2o_percentage,
unhealthy_sensitive_ch2o_percentage = EXCLUDED.unhealthy_sensitive_ch2o_percentage,
unhealthy_ch2o_percentage = EXCLUDED.unhealthy_ch2o_percentage,
very_unhealthy_ch2o_percentage = EXCLUDED.very_unhealthy_ch2o_percentage,
hazardous_ch2o_percentage = EXCLUDED.hazardous_ch2o_percentage,
daily_avg_ch2o = EXCLUDED.daily_avg_ch2o,
daily_max_ch2o = EXCLUDED.daily_max_ch2o,
daily_min_ch2o = EXCLUDED.daily_min_ch2o;
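
Unlike the preceding variant, this statement takes no parameters and recomputes every day present in the device-status logs, which makes it suitable for a scheduled backfill. A minimal sketch of wiring it into a cron job is below; @nestjs/schedule, the job class, and the SQL file path are assumptions, not part of this changeset.

// Sketch only: drive the parameterless backfill statement above from a cron job.
import { readFileSync } from 'fs';
import { join } from 'path';
import { Injectable } from '@nestjs/common';
import { Cron, CronExpression } from '@nestjs/schedule';
import { DataSource } from 'typeorm';

@Injectable()
export class PollutantStatsBackfillJob {
  constructor(private readonly dataSource: DataSource) {}

  @Cron(CronExpression.EVERY_DAY_AT_MIDNIGHT)
  async run(): Promise<void> {
    const sql = readFileSync(
      join(__dirname, 'queries/fill-all-daily-pollutant-stats.sql'), // assumed path
      'utf8',
    );
    await this.dataSource.query(sql); // no positional parameters in this variant
  }
}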

View File

@ -1,94 +1,100 @@
- WITH presence_logs AS (
+ -- Step 1: Get device presence events with previous timestamps
+ WITH start_date AS (
    SELECT
-     d.uuid AS device_id,
      d.space_device_uuid AS space_id,
+     l.device_id,
-     l.event_time,
      l.value,
-     LAG(l.event_time) OVER (PARTITION BY l.device_id ORDER BY l.event_time) AS prev_time,
-     LAG(l.value) OVER (PARTITION BY l.device_id ORDER BY l.event_time) AS prev_value
+     l.event_time::timestamp AS event_time,
+     LAG(l.event_time::timestamp) OVER (PARTITION BY d.uuid ORDER BY l.event_time) AS prev_timestamp
  FROM device d
-   JOIN "device-status-log" l ON d.uuid = l.device_id
-   JOIN product p ON p.uuid = d.product_device_uuid
-   WHERE l.code = 'presence_state'
-     AND p.cat_name = 'hps'
+   LEFT JOIN "device-status-log" l
+     ON d.uuid = l.device_id
+   LEFT JOIN product p
+     ON p.uuid = d.product_device_uuid
+   WHERE p.cat_name = 'hps'
+     AND l.code = 'presence_state'
  ),
- -- Intervals when device was in 'presence' (between prev_time and event_time when value='none')
- presence_intervals AS (
+ -- Step 2: Identify periods when device reports "none"
+ device_none_periods AS (
    SELECT
      space_id,
-     prev_time AS start_time,
-     event_time AS end_time
-   FROM presence_logs
+     device_id,
+     event_time AS empty_from,
+     LEAD(event_time) OVER (PARTITION BY device_id ORDER BY event_time) AS empty_until
+   FROM start_date
    WHERE value = 'none'
-     AND prev_value = 'presence'
-     AND prev_time IS NOT NULL
  ),
- -- Split intervals across days
- split_intervals AS (
+ -- Step 3: Clip the "none" periods to the edges of each day
+ clipped_device_none_periods AS (
    SELECT
      space_id,
-     generate_series(
-       date_trunc('day', start_time),
-       date_trunc('day', end_time),
-       interval '1 day'
-     )::date AS event_date,
-     GREATEST(start_time, date_trunc('day', start_time)) AS interval_start,
-     LEAST(end_time, date_trunc('day', end_time) + interval '1 day') AS interval_end
-   FROM presence_intervals
+     GREATEST(empty_from, DATE_TRUNC('day', empty_from)) AS clipped_from,
+     LEAST(empty_until, DATE_TRUNC('day', empty_until) + INTERVAL '1 day') AS clipped_until
+   FROM device_none_periods
+   WHERE empty_until IS NOT NULL
  ),
- -- Mark and group overlapping intervals per space per day
- ordered_intervals AS (
+ -- Step 4: Break multi-day periods into daily intervals
+ generated_daily_intervals AS (
    SELECT
      space_id,
-     event_date,
-     interval_start,
-     interval_end,
-     LAG(interval_end) OVER (PARTITION BY space_id, event_date ORDER BY interval_start) AS prev_end
-   FROM split_intervals
+     gs::date AS day,
+     GREATEST(clipped_from, gs) AS interval_start,
+     LEAST(clipped_until, gs + INTERVAL '1 day') AS interval_end
+   FROM clipped_device_none_periods,
+   LATERAL generate_series(DATE_TRUNC('day', clipped_from), DATE_TRUNC('day', clipped_until), INTERVAL '1 day') AS gs
  ),
- grouped_intervals AS (
-   SELECT *,
-     SUM(CASE
-       WHEN prev_end IS NULL OR interval_start > prev_end THEN 1
-       ELSE 0
-     END) OVER (PARTITION BY space_id, event_date ORDER BY interval_start) AS grp
-   FROM ordered_intervals
- ),
- -- Merge overlapping intervals per group
+ -- Step 5: Merge overlapping or adjacent intervals per day
  merged_intervals AS (
    SELECT
      space_id,
-     event_date,
-     MIN(interval_start) AS merged_start,
-     MAX(interval_end) AS merged_end
-   FROM grouped_intervals
-   GROUP BY space_id, event_date, grp
- ),
- -- Sum durations of merged intervals
- summed_intervals AS (
+     day,
+     interval_start,
+     interval_end
+   FROM (
    SELECT
      space_id,
-     event_date,
-     SUM(EXTRACT(EPOCH FROM (merged_end - merged_start))) AS raw_occupied_seconds
-   FROM merged_intervals
-   GROUP BY space_id, event_date
- ),
- final_data AS (
-   SELECT
+       day,
+       interval_start,
+       interval_end,
+       LAG(interval_end) OVER (PARTITION BY space_id, day ORDER BY interval_start) AS prev_end
+     FROM generated_daily_intervals
+   ) sub
+   WHERE prev_end IS NULL OR interval_start > prev_end
+ ),
+ -- Step 6: Sum up total missing seconds (device reported "none") per day
+ missing_seconds_per_day AS (
+   SELECT
      space_id,
-     event_date,
-     LEAST(raw_occupied_seconds, 86400) AS occupied_seconds,
-     ROUND(LEAST(raw_occupied_seconds, 86400) / 86400.0 * 100, 2) AS occupancy_percentage
-   FROM summed_intervals
-   ORDER BY space_id, event_date)
+     day AS missing_date,
+     SUM(EXTRACT(EPOCH FROM (interval_end - interval_start))) AS total_missing_seconds
+   FROM merged_intervals
+   GROUP BY space_id, day
+ ),
+ -- Step 7: Calculate total occupied time per day (86400 - missing)
+ occupied_seconds_per_day AS (
+   SELECT
+     space_id,
+     missing_date as event_date,
+     86400 - total_missing_seconds AS total_occupied_seconds,
+     (86400 - total_missing_seconds)/86400*100 as occupancy_prct
+   FROM missing_seconds_per_day
+ )
+ -- Final Output
+ , final_data as (
+   SELECT space_id,
+     event_date,
+     total_occupied_seconds,
+     occupancy_prct
+   FROM occupied_seconds_per_day
+   ORDER BY 1,2
+ )
  INSERT INTO public."space-daily-occupancy-duration" (
    space_uuid,
@ -98,13 +104,12 @@ INSERT INTO public."space-daily-occupancy-duration" (
  )
  select space_id,
    event_date,
-   occupied_seconds,
-   occupancy_percentage
+   total_occupied_seconds,
+   occupancy_prct
  FROM final_data
  ON CONFLICT (space_uuid, event_date) DO UPDATE
  SET
-   occupancy_percentage = EXCLUDED.occupancy_percentage,
-   occupied_seconds = EXCLUDED.occupied_seconds;
+   occupancy_percentage = EXCLUDED.occupancy_percentage;
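
The rewritten pipeline (the '+' lines above) derives occupancy indirectly: it sums the clipped, merged "none" intervals per space and day, then takes 86400 minus that total as occupied time (Steps 5-7). The small TypeScript sketch below mirrors that arithmetic for a single space and day; it is illustrative only and assumes the intervals are already clipped to the day and non-overlapping.

// Illustrative arithmetic mirroring Steps 5-7 above for one space/day.
interface Interval { start: Date; end: Date } // clipped to the day, non-overlapping

export function dailyOccupancy(noneIntervals: Interval[]): {
  occupiedSeconds: number;
  occupancyPercentage: number;
} {
  // Total seconds during which the sensor reported "none".
  const missingSeconds = noneIntervals.reduce(
    (sum, i) => sum + (i.end.getTime() - i.start.getTime()) / 1000,
    0,
  );
  // Occupied time is the remainder of the 86400-second day.
  const occupiedSeconds = Math.max(0, 86400 - missingSeconds);
  return {
    occupiedSeconds,
    occupancyPercentage: (occupiedSeconds / 86400) * 100,
  };
}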

View File

@ -1,107 +1,117 @@
WITH params AS (
    SELECT
        TO_DATE(NULLIF($1, ''), 'YYYY-MM-DD') AS event_date,
        $2::uuid AS space_id
)
, start_date AS (
    SELECT
        d.uuid AS device_id,
        d.space_device_uuid AS space_id,
        l.device_id,
        l.value,
        l.event_time::timestamp AS event_time,
        LAG(l.event_time::timestamp) OVER (PARTITION BY d.uuid ORDER BY l.event_time) AS prev_timestamp
    FROM device d
    LEFT JOIN "device-status-log" l
        ON d.uuid = l.device_id
    LEFT JOIN product p
        ON p.uuid = d.product_device_uuid
    WHERE p.cat_name = 'hps'
      AND l.code = 'presence_state'
),
-- Step 2: Identify periods when device reports "none"
device_none_periods AS (
    SELECT
        space_id,
        device_id,
        event_time AS empty_from,
        LEAD(event_time) OVER (PARTITION BY device_id ORDER BY event_time) AS empty_until
    FROM start_date
    WHERE value = 'none'
),
-- Step 3: Clip the "none" periods to the edges of each day
clipped_device_none_periods AS (
    SELECT
        space_id,
        GREATEST(empty_from, DATE_TRUNC('day', empty_from)) AS clipped_from,
        LEAST(empty_until, DATE_TRUNC('day', empty_until) + INTERVAL '1 day') AS clipped_until
    FROM device_none_periods
    WHERE empty_until IS NOT NULL
),
-- Step 4: Break multi-day periods into daily intervals
generated_daily_intervals AS (
    SELECT
        space_id,
        gs::date AS day,
        GREATEST(clipped_from, gs) AS interval_start,
        LEAST(clipped_until, gs + INTERVAL '1 day') AS interval_end
    FROM clipped_device_none_periods,
    LATERAL generate_series(DATE_TRUNC('day', clipped_from), DATE_TRUNC('day', clipped_until), INTERVAL '1 day') AS gs
),
-- Step 5: Merge overlapping or adjacent intervals per day
merged_intervals AS (
    SELECT
        space_id,
        day,
        interval_start,
        interval_end
    FROM (
        SELECT
            space_id,
            day,
            interval_start,
            interval_end,
            LAG(interval_end) OVER (PARTITION BY space_id, day ORDER BY interval_start) AS prev_end
        FROM generated_daily_intervals
    ) sub
    WHERE prev_end IS NULL OR interval_start > prev_end
),
-- Step 6: Sum up total missing seconds (device reported "none") per day
missing_seconds_per_day AS (
    SELECT
        space_id,
        day AS missing_date,
        SUM(EXTRACT(EPOCH FROM (interval_end - interval_start))) AS total_missing_seconds
    FROM merged_intervals
    GROUP BY space_id, day
),
-- Step 7: Calculate total occupied time per day (86400 - missing)
occupied_seconds_per_day AS (
    SELECT
        space_id,
        missing_date AS event_date,
        86400 - total_missing_seconds AS total_occupied_seconds,
        (86400 - total_missing_seconds) / 86400 * 100 AS occupancy_percentage
    FROM missing_seconds_per_day
)
-- Final Output
, final_data AS (
    SELECT occupied_seconds_per_day.space_id,
           occupied_seconds_per_day.event_date,
           occupied_seconds_per_day.occupancy_percentage
    FROM occupied_seconds_per_day
    JOIN params p ON true
      AND p.space_id = occupied_seconds_per_day.space_id
      AND p.event_date = occupied_seconds_per_day.event_date
    ORDER BY 1, 2
)
INSERT INTO public."space-daily-occupancy-duration" (
    space_uuid,
    event_date,
    occupancy_percentage
)
select space_id,
       event_date,
       occupancy_percentage
FROM final_data
ON CONFLICT (space_uuid, event_date) DO UPDATE
SET
    occupancy_percentage = EXCLUDED.occupancy_percentage;
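The upsert is parameterized by an event date ($1) and a space UUID ($2). A hedged sketch of how it might be invoked from the NestJS side; the SQL file name and the DataSource wiring are assumptions, only the parameter order comes from the query above:

import { DataSource } from 'typeorm';
import { readFileSync } from 'fs';

// Illustrative runner: fill one space's daily occupancy row.
export async function upsertDailyOccupancy(
  dataSource: DataSource,
  eventDate: string, // 'YYYY-MM-DD', or '' to leave the date filter NULL
  spaceUuid: string,
): Promise<void> {
  // Assumed file name; adjust to wherever the query actually lives.
  const sql = readFileSync('fill_daily_space_occupancy_duration.sql', 'utf8');
  // $1 -> event_date, $2 -> space_id, matching the params CTE above.
  await dataSource.query(sql, [eventDate, spaceUuid]);
}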

View File

@ -1,6 +1,7 @@
WITH params AS (
    SELECT
        $1::uuid AS device_id,
        $2::date AS target_date
),
total_energy AS (
    SELECT
@ -13,6 +14,7 @@ total_energy AS (
        MAX(log.value)::integer AS max_value
    FROM "device-status-log" log, params
    WHERE log.code = 'EnergyConsumed'
      AND log.device_id = params.device_id
      AND log.event_time::date = params.target_date
    GROUP BY 1,2,3,4,5
),
@ -27,6 +29,7 @@ energy_phase_A AS (
        MAX(log.value)::integer AS max_value
    FROM "device-status-log" log, params
    WHERE log.code = 'EnergyConsumedA'
      AND log.device_id = params.device_id
      AND log.event_time::date = params.target_date
    GROUP BY 1,2,3,4,5
),
@ -41,6 +44,7 @@ energy_phase_B AS (
        MAX(log.value)::integer AS max_value
    FROM "device-status-log" log, params
    WHERE log.code = 'EnergyConsumedB'
      AND log.device_id = params.device_id
      AND log.event_time::date = params.target_date
    GROUP BY 1,2,3,4,5
),
@ -55,6 +59,7 @@ energy_phase_C AS (
        MAX(log.value)::integer AS max_value
    FROM "device-status-log" log, params
    WHERE log.code = 'EnergyConsumedC'
      AND log.device_id = params.device_id
      AND log.event_time::date = params.target_date
    GROUP BY 1,2,3,4,5
),
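Each CTE takes the MIN and MAX of a cumulative EnergyConsumed* counter for one device and day, so the day's usage is presumably the difference between the two. A small TypeScript sketch of that reduction; the row shape and the no-reset assumption are mine, not stated in the diff:

interface EnergyRow {
  min_value: number;
  max_value: number;
}

// Daily consumption from a cumulative counter: last reading minus first.
// Assumes the counter does not reset or overflow within the day.
function dailyConsumption(row: EnergyRow): number {
  return row.max_value - row.min_value;
}

console.log(dailyConsumption({ min_value: 1200, max_value: 1275 })); // 75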

View File

@ -1,6 +1,8 @@
WITH params AS (
    SELECT
        $1::uuid AS device_id,
        $2::date AS target_date,
        $3::text AS target_hour
),
total_energy AS (
    SELECT
@ -13,7 +15,9 @@ total_energy AS (
        MAX(log.value)::integer AS max_value
    FROM "device-status-log" log, params
    WHERE log.code = 'EnergyConsumed'
      AND log.device_id = params.device_id
      AND log.event_time::date = params.target_date
      AND EXTRACT(HOUR FROM log.event_time)::text = params.target_hour
    GROUP BY 1,2,3,4,5
),
energy_phase_A AS (
@ -27,7 +31,9 @@ energy_phase_A AS (
        MAX(log.value)::integer AS max_value
    FROM "device-status-log" log, params
    WHERE log.code = 'EnergyConsumedA'
      AND log.device_id = params.device_id
      AND log.event_time::date = params.target_date
      AND EXTRACT(HOUR FROM log.event_time)::text = params.target_hour
    GROUP BY 1,2,3,4,5
),
energy_phase_B AS (
@ -41,7 +47,9 @@ energy_phase_B AS (
        MAX(log.value)::integer AS max_value
    FROM "device-status-log" log, params
    WHERE log.code = 'EnergyConsumedB'
      AND log.device_id = params.device_id
      AND log.event_time::date = params.target_date
      AND EXTRACT(HOUR FROM log.event_time)::text = params.target_hour
    GROUP BY 1,2,3,4,5
),
energy_phase_C AS (
@ -55,7 +63,9 @@ energy_phase_C AS (
        MAX(log.value)::integer AS max_value
    FROM "device-status-log" log, params
    WHERE log.code = 'EnergyConsumedC'
      AND log.device_id = params.device_id
      AND log.event_time::date = params.target_date
      AND EXTRACT(HOUR FROM log.event_time)::text = params.target_hour
    GROUP BY 1,2,3,4,5
),
final_data AS (
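One practical note on the new $3 parameter: the query compares EXTRACT(HOUR FROM ...)::text, and EXTRACT yields an unpadded number, so the hour must be passed as '0'..'23', not '07'. A tiny helper sketch (illustrative only):

// Build the $3 hour parameter to match EXTRACT(HOUR ...)::text in Postgres.
function toHourParam(d: Date): string {
  return String(d.getHours()); // e.g. '7', never '07'
}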

View File

@ -1,6 +1,7 @@
WITH params AS (
    SELECT
        $1::uuid AS device_id,
        $2::text AS target_month -- Format should match 'MM-YYYY'
),
total_energy AS (
    SELECT
@ -13,6 +14,7 @@ total_energy AS (
        MAX(log.value)::integer AS max_value
    FROM "device-status-log" log, params
    WHERE log.code = 'EnergyConsumed'
      AND log.device_id = params.device_id
      AND TO_CHAR(log.event_time, 'MM-YYYY') = params.target_month
    GROUP BY 1,2,3,4,5
),
@ -27,6 +29,7 @@ energy_phase_A AS (
        MAX(log.value)::integer AS max_value
    FROM "device-status-log" log, params
    WHERE log.code = 'EnergyConsumedA'
      AND log.device_id = params.device_id
      AND TO_CHAR(log.event_time, 'MM-YYYY') = params.target_month
    GROUP BY 1,2,3,4,5
),
@ -41,6 +44,7 @@ energy_phase_B AS (
        MAX(log.value)::integer AS max_value
    FROM "device-status-log" log, params
    WHERE log.code = 'EnergyConsumedB'
      AND log.device_id = params.device_id
      AND TO_CHAR(log.event_time, 'MM-YYYY') = params.target_month
    GROUP BY 1,2,3,4,5
),
@ -55,6 +59,7 @@ energy_phase_C AS (
        MAX(log.value)::integer AS max_value
    FROM "device-status-log" log, params
    WHERE log.code = 'EnergyConsumedC'
      AND log.device_id = params.device_id
      AND TO_CHAR(log.event_time, 'MM-YYYY') = params.target_month
    GROUP BY 1,2,3,4,5
),
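The monthly variant compares against a text month in 'MM-YYYY' form, so the caller has to format the parameter the same way. A small helper sketch, not taken from the codebase:

// Format a Date as 'MM-YYYY' to match TO_CHAR(log.event_time, 'MM-YYYY').
function toMonthParam(d: Date): string {
  const mm = String(d.getMonth() + 1).padStart(2, '0');
  return `${mm}-${d.getFullYear()}`;
}

console.log(toMonthParam(new Date(2025, 4, 27))); // '05-2025'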

View File

@ -16,5 +16,4 @@ WITH params AS (
WHERE A.device_uuid::text = ANY(P.device_ids)
  AND (P.month IS NULL
       OR date_trunc('month', A.event_date) = P.month
  )

View File

@ -1,6 +1,7 @@
WITH params AS (
    SELECT
        TO_DATE(NULLIF($1, ''), 'YYYY-MM-DD') AS event_date,
        $2::uuid AS space_id
),
device_logs AS (
@ -86,7 +87,8 @@ SELECT summary.space_id,
    count_total_presence_detected
FROM summary
JOIN params P ON true
where summary.space_id = P.space_id
  and (P.event_date IS NULL or summary.event_date::date = P.event_date)
ORDER BY space_id, event_date)

View File

@ -26,67 +26,42 @@ BEGIN
        ('pm10', 255, 354, 151, 200),
        -- VOC
        ('voc_value', 0, 200, 0, 50),
        ('voc_value', 201, 400, 51, 100),
        ('voc_value', 401, 600, 101, 150),
        ('voc_value', 601, 1000, 151, 200),
        -- CH2O
        ('ch2o_value', 0, 2, 0, 50),
        ('ch2o_value', 2.1, 4, 51, 100),
        ('ch2o_value', 4.1, 6, 101, 150),
        -- CO2
        ('co2_value', 350, 1000, 0, 50),
        ('co2_value', 1001, 1250, 51, 100),
        ('co2_value', 1251, 1500, 101, 150),
        ('co2_value', 1501, 2000, 151, 200)
    ) AS v(pollutant, c_low, c_high, i_low, i_high)
    WHERE v.pollutant = LOWER(p_pollutant)
      AND concentration BETWEEN v.c_low AND v.c_high
    LIMIT 1;

    -- Linear interpolation
    RETURN ROUND(((i_high - i_low) * (concentration - c_low) / (c_high - c_low)) + i_low);
END;
$$ LANGUAGE plpgsql;

-- CTE for device + status log + space
-- Function to classify AQI
CREATE OR REPLACE FUNCTION classify_aqi(aqi NUMERIC)
RETURNS TEXT AS $$
BEGIN
RETURN CASE
WHEN aqi BETWEEN 0 AND 50 THEN 'Good'
WHEN aqi BETWEEN 51 AND 100 THEN 'Moderate'
WHEN aqi BETWEEN 101 AND 150 THEN 'Unhealthy for Sensitive Groups'
WHEN aqi BETWEEN 151 AND 200 THEN 'Unhealthy'
WHEN aqi BETWEEN 201 AND 300 THEN 'Very Unhealthy'
WHEN aqi >= 301 THEN 'Hazardous'
ELSE NULL
END;
END;
$$ LANGUAGE plpgsql;
-- Function to convert AQI level string to number
CREATE OR REPLACE FUNCTION level_to_numeric(level_text TEXT)
RETURNS NUMERIC AS $$
BEGIN
RETURN CAST(regexp_replace(level_text, '[^0-9]', '', 'g') AS NUMERIC);
EXCEPTION WHEN others THEN
RETURN NULL;
END;
$$ LANGUAGE plpgsql;
-- Query Pipeline Starts Here
WITH device_space AS (
    SELECT
        device.uuid AS device_id,
        device.created_at,
        device.space_device_uuid AS space_id,
        "device-status-log".event_id,
        "device-status-log".event_time::date,
        "device-status-log".code,
        "device-status-log".value,
        "device-status-log".log
    FROM device
    LEFT JOIN "device-status-log"
        ON device.uuid = "device-status-log".device_id
@ -95,268 +70,75 @@ WITH device_space AS (
    WHERE product.cat_name = 'hjjcy'
),
-- Aggregate air sensor data per device per day
air_data AS (
    SELECT
        event_time AS date,
        device_id,
        space_id,
        -- VOC
        MIN(CASE WHEN code = 'voc_value' THEN value::numeric END) AS voc_min,
        MAX(CASE WHEN code = 'voc_value' THEN value::numeric END) AS voc_max,
        AVG(CASE WHEN code = 'voc_value' THEN value::numeric END) AS voc_avg,
        -- PM1
        MIN(CASE WHEN code = 'pm1' THEN value::numeric END) AS pm1_min,
        MAX(CASE WHEN code = 'pm1' THEN value::numeric END) AS pm1_max,
        AVG(CASE WHEN code = 'pm1' THEN value::numeric END) AS pm1_avg,
        -- PM2.5
        MIN(CASE WHEN code = 'pm25_value' THEN value::numeric END) AS pm25_min,
        MAX(CASE WHEN code = 'pm25_value' THEN value::numeric END) AS pm25_max,
        AVG(CASE WHEN code = 'pm25_value' THEN value::numeric END) AS pm25_avg,
        -- PM10
        MIN(CASE WHEN code = 'pm10' THEN value::numeric END) AS pm10_min,
        MAX(CASE WHEN code = 'pm10' THEN value::numeric END) AS pm10_max,
        AVG(CASE WHEN code = 'pm10' THEN value::numeric END) AS pm10_avg,
        -- CH2O
        MIN(CASE WHEN code = 'ch2o_value' THEN value::numeric END) AS ch2o_min,
        MAX(CASE WHEN code = 'ch2o_value' THEN value::numeric END) AS ch2o_max,
        AVG(CASE WHEN code = 'ch2o_value' THEN value::numeric END) AS ch2o_avg,
        -- Humidity
        MIN(CASE WHEN code = 'humidity_value' THEN value::numeric END) AS humidity_low,
        MAX(CASE WHEN code = 'humidity_value' THEN value::numeric END) AS humidity_high,
        AVG(CASE WHEN code = 'humidity_value' THEN value::numeric END) AS humidity_avg,
        -- Temperature
        MIN(CASE WHEN code = 'temp_current' THEN value::numeric END) AS temp_low,
        MAX(CASE WHEN code = 'temp_current' THEN value::numeric END) AS temp_high,
        AVG(CASE WHEN code = 'temp_current' THEN value::numeric END) AS temp_avg,
        -- CO2
        MIN(CASE WHEN code = 'co2_value' THEN value::numeric END) AS co2_min,
        MAX(CASE WHEN code = 'co2_value' THEN value::numeric END) AS co2_max,
        AVG(CASE WHEN code = 'co2_value' THEN value::numeric END) AS co2_avg
    FROM device_space
    GROUP BY date, device_id, space_id
),
filled_pollutants AS (
SELECT
*,
-- AVG
COALESCE(pm25_avg, LAG(pm25_avg) OVER (PARTITION BY device_id ORDER BY event_hour)) AS pm25_avg_f,
COALESCE(pm10_avg, LAG(pm10_avg) OVER (PARTITION BY device_id ORDER BY event_hour)) AS pm10_avg_f,
COALESCE(voc_avg, LAG(voc_avg) OVER (PARTITION BY device_id ORDER BY event_hour)) AS voc_avg_f,
COALESCE(co2_avg, LAG(co2_avg) OVER (PARTITION BY device_id ORDER BY event_hour)) AS co2_avg_f,
COALESCE(ch2o_avg, LAG(ch2o_avg) OVER (PARTITION BY device_id ORDER BY event_hour)) AS ch2o_avg_f,
-- MIN
COALESCE(pm25_min, LAG(pm25_min) OVER (PARTITION BY device_id ORDER BY event_hour)) AS pm25_min_f,
COALESCE(pm10_min, LAG(pm10_min) OVER (PARTITION BY device_id ORDER BY event_hour)) AS pm10_min_f,
COALESCE(voc_min, LAG(voc_min) OVER (PARTITION BY device_id ORDER BY event_hour)) AS voc_min_f,
COALESCE(co2_min, LAG(co2_min) OVER (PARTITION BY device_id ORDER BY event_hour)) AS co2_min_f,
COALESCE(ch2o_min, LAG(ch2o_min) OVER (PARTITION BY device_id ORDER BY event_hour)) AS ch2o_min_f,
-- MAX
COALESCE(pm25_max, LAG(pm25_max) OVER (PARTITION BY device_id ORDER BY event_hour)) AS pm25_max_f,
COALESCE(pm10_max, LAG(pm10_max) OVER (PARTITION BY device_id ORDER BY event_hour)) AS pm10_max_f,
COALESCE(voc_max, LAG(voc_max) OVER (PARTITION BY device_id ORDER BY event_hour)) AS voc_max_f,
COALESCE(co2_max, LAG(co2_max) OVER (PARTITION BY device_id ORDER BY event_hour)) AS co2_max_f,
COALESCE(ch2o_max, LAG(ch2o_max) OVER (PARTITION BY device_id ORDER BY event_hour)) AS ch2o_max_f
FROM average_pollutants
),
hourly_results AS (
SELECT
device_id,
space_id,
event_date,
event_hour,
pm1_min, pm1_avg, pm1_max,
pm25_min_f, pm25_avg_f, pm25_max_f,
pm10_min_f, pm10_avg_f, pm10_max_f,
voc_min_f, voc_avg_f, voc_max_f,
co2_min_f, co2_avg_f, co2_max_f,
ch2o_min_f, ch2o_avg_f, ch2o_max_f,
GREATEST(
calculate_aqi('pm25', pm25_min_f),
calculate_aqi('pm10', pm10_min_f)
) AS hourly_min_aqi,
GREATEST(
calculate_aqi('pm25', pm25_avg_f),
calculate_aqi('pm10', pm10_avg_f)
) AS hourly_avg_aqi,
GREATEST(
calculate_aqi('pm25', pm25_max_f),
calculate_aqi('pm10', pm10_max_f)
) AS hourly_max_aqi,
classify_aqi(GREATEST(
calculate_aqi('pm25', pm25_avg_f),
calculate_aqi('pm10', pm10_avg_f)
)) AS aqi_category,
classify_aqi(calculate_aqi('pm25',pm25_avg_f)) as pm25_category,
classify_aqi(calculate_aqi('pm10',pm10_avg_f)) as pm10_category,
classify_aqi(calculate_aqi('voc',voc_avg_f)) as voc_category,
classify_aqi(calculate_aqi('co2',co2_avg_f)) as co2_category,
classify_aqi(calculate_aqi('ch2o',ch2o_avg_f)) as ch2o_category
FROM filled_pollutants
),
daily_category_counts AS (
SELECT device_id, space_id, event_date, aqi_category AS category, 'aqi' AS pollutant, COUNT(*) AS category_count
FROM hourly_results
GROUP BY device_id, space_id, event_date, aqi_category
UNION ALL
SELECT device_id, space_id, event_date, pm25_category AS category, 'pm25' AS pollutant, COUNT(*) AS category_count
FROM hourly_results
GROUP BY device_id, space_id, event_date, pm25_category
UNION ALL
SELECT device_id, space_id, event_date, pm10_category AS category, 'pm10' AS pollutant, COUNT(*) AS category_count
FROM hourly_results
GROUP BY device_id, space_id, event_date, pm10_category
UNION ALL
SELECT device_id, space_id, event_date, voc_category AS category, 'voc' AS pollutant, COUNT(*) AS category_count
FROM hourly_results
GROUP BY device_id, space_id, event_date, voc_category
UNION ALL
SELECT device_id, space_id, event_date, co2_category AS category, 'co2' AS pollutant, COUNT(*) AS category_count
FROM hourly_results
GROUP BY device_id, space_id, event_date, co2_category
UNION ALL
SELECT device_id, space_id, event_date, ch2o_category AS category, 'ch2o' AS pollutant, COUNT(*) AS category_count
FROM hourly_results
GROUP BY device_id, space_id, event_date, ch2o_category
),
daily_totals AS (
SELECT
device_id,
space_id,
event_date,
SUM(category_count) AS total_count
FROM daily_category_counts
where pollutant = 'aqi'
GROUP BY device_id, space_id, event_date
),
-- Pivot Categories into Columns
daily_percentages AS (
select
dt.device_id,
dt.space_id,
dt.event_date,
-- AQI CATEGORIES
ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Good' and dcc.pollutant = 'aqi' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS good_aqi_percentage,
ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Moderate' and dcc.pollutant = 'aqi' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS moderate_aqi_percentage,
ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Unhealthy for Sensitive Groups' and dcc.pollutant = 'aqi' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS unhealthy_sensitive_aqi_percentage,
ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Unhealthy' and dcc.pollutant = 'aqi' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS unhealthy_aqi_percentage,
ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Very Unhealthy' and dcc.pollutant = 'aqi' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS very_unhealthy_aqi_percentage,
ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Hazardous' and dcc.pollutant = 'aqi' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS hazardous_aqi_percentage,
-- PM25 CATEGORIES
ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Good' and dcc.pollutant = 'pm25' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS good_pm25_percentage,
ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Moderate' and dcc.pollutant = 'pm25' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS moderate_pm25_percentage,
ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Unhealthy for Sensitive Groups' and dcc.pollutant = 'pm25' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS unhealthy_sensitive_pm25_percentage,
ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Unhealthy' and dcc.pollutant = 'pm25' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS unhealthy_pm25_percentage,
ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Very Unhealthy' and dcc.pollutant = 'pm25' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS very_unhealthy_pm25_percentage,
ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Hazardous' and dcc.pollutant = 'pm25' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS hazardous_pm25_percentage,
-- PM10 CATEGORIES
ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Good' and dcc.pollutant = 'pm10' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS good_pm10_percentage,
ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Moderate' and dcc.pollutant = 'pm10' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS moderate_pm10_percentage,
ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Unhealthy for Sensitive Groups' and dcc.pollutant = 'pm10' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS unhealthy_sensitive_pm10_percentage,
ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Unhealthy' and dcc.pollutant = 'pm10' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS unhealthy_pm10_percentage,
ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Very Unhealthy' and dcc.pollutant = 'pm10' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS very_unhealthy_pm10_percentage,
ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Hazardous' and dcc.pollutant = 'pm10' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS hazardous_pm10_percentage,
-- VOC CATEGORIES
ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Good' and dcc.pollutant = 'voc' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS good_voc_percentage,
ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Moderate' and dcc.pollutant = 'voc' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS moderate_voc_percentage,
ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Unhealthy for Sensitive Groups' and dcc.pollutant = 'voc' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS unhealthy_sensitive_voc_percentage,
ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Unhealthy' and dcc.pollutant = 'voc' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS unhealthy_voc_percentage,
ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Very Unhealthy' and dcc.pollutant = 'voc' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS very_unhealthy_voc_percentage,
ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Hazardous' and dcc.pollutant = 'voc' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS hazardous_voc_percentage,
-- CO2 CATEGORIES
ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Good' and dcc.pollutant = 'co2' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS good_co2_percentage,
ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Moderate' and dcc.pollutant = 'co2' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS moderate_co2_percentage,
ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Unhealthy for Sensitive Groups' and dcc.pollutant = 'co2' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS unhealthy_sensitive_co2_percentage,
ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Unhealthy' and dcc.pollutant = 'co2' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS unhealthy_co2_percentage,
ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Very Unhealthy' and dcc.pollutant = 'co2' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS very_unhealthy_co2_percentage,
ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Hazardous' and dcc.pollutant = 'co2' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS hazardous_co2_percentage,
-- CH20 CATEGORIES
ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Good' and dcc.pollutant = 'ch2o' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS good_ch2o_percentage,
ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Moderate' and dcc.pollutant = 'ch2o' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS moderate_ch2o_percentage,
ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Unhealthy for Sensitive Groups' and dcc.pollutant = 'ch2o' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS unhealthy_sensitive_ch2o_percentage,
ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Unhealthy' and dcc.pollutant = 'ch2o' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS unhealthy_ch2o_percentage,
ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Very Unhealthy' and dcc.pollutant = 'ch2o' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS very_unhealthy_ch2o_percentage,
ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Hazardous' and dcc.pollutant = 'ch2o' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS hazardous_ch2o_percentage
FROM daily_totals dt
LEFT JOIN daily_category_counts dcc
ON dt.device_id = dcc.device_id AND dt.event_date = dcc.event_date
GROUP BY dt.device_id, dt.space_id, dt.event_date, dt.total_count
),
daily_averages AS (
SELECT
device_id,
space_id,
event_date,
-- AQI
ROUND(AVG(hourly_min_aqi)::numeric, 2) AS daily_min_aqi,
ROUND(AVG(hourly_avg_aqi)::numeric, 2) AS daily_avg_aqi,
ROUND(AVG(hourly_max_aqi)::numeric, 2) AS daily_max_aqi,
-- PM25
ROUND(AVG(pm25_min_f)::numeric, 2) AS daily_min_pm25,
ROUND(AVG(pm25_avg_f)::numeric, 2) AS daily_avg_pm25,
ROUND(AVG(pm25_max_f)::numeric, 2) AS daily_max_pm25,
-- PM10
ROUND(AVG(pm10_min_f)::numeric, 2) AS daily_min_pm10,
ROUND(AVG(pm10_avg_f)::numeric, 2) AS daily_avg_pm10,
ROUND(AVG(pm10_max_f)::numeric, 2) AS daily_max_pm10,
-- VOC
ROUND(AVG(voc_min_f)::numeric, 2) AS daily_min_voc,
ROUND(AVG(voc_avg_f)::numeric, 2) AS daily_avg_voc,
ROUND(AVG(voc_max_f)::numeric, 2) AS daily_max_voc,
-- CO2
ROUND(AVG(co2_min_f)::numeric, 2) AS daily_min_co2,
ROUND(AVG(co2_avg_f)::numeric, 2) AS daily_avg_co2,
ROUND(AVG(co2_max_f)::numeric, 2) AS daily_max_co2,
-- CH2O
ROUND(AVG(ch2o_min_f)::numeric, 2) AS daily_min_ch2o,
ROUND(AVG(ch2o_avg_f)::numeric, 2) AS daily_avg_ch2o,
ROUND(AVG(ch2o_max_f)::numeric, 2) AS daily_max_ch2o
FROM hourly_results
GROUP BY device_id, space_id, event_date
)
-- Final select with AQI calculation
SELECT
    date,
    device_id,
    space_id,
    voc_min, voc_max, voc_avg,
    pm1_min, pm1_max, pm1_avg,
    pm25_min, pm25_max, pm25_avg,
    pm10_min, pm10_max, pm10_avg,
    ch2o_min, ch2o_max, ch2o_avg,
    humidity_low, humidity_high, humidity_avg,
    temp_low, temp_high, temp_avg,
    co2_min, co2_max, co2_avg,
    GREATEST(
        calculate_aqi('pm25', pm25_avg),
        calculate_aqi('pm10', pm10_avg),
        calculate_aqi('voc_value', voc_avg),
        calculate_aqi('co2_value', co2_avg),
        calculate_aqi('ch2o_value', ch2o_avg)
    ) AS overall_AQI
FROM air_data;
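calculate_aqi is standard breakpoint interpolation: pick the (c_low, c_high, i_low, i_high) row for the pollutant, then map the concentration linearly into the index range. The same formula in TypeScript, worked through the CO2 band from the function above:

// AQI breakpoint interpolation, mirroring the SQL calculate_aqi function.
function interpolateAqi(
  concentration: number,
  cLow: number,
  cHigh: number,
  iLow: number,
  iHigh: number,
): number {
  return Math.round(((iHigh - iLow) * (concentration - cLow)) / (cHigh - cLow) + iLow);
}

// co2_value = 1100 falls in the (1001, 1250) band mapped to (51, 100):
console.log(interpolateAqi(1100, 1001, 1250, 51, 100)); // 70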

View File

@ -1,275 +0,0 @@
-- Query Pipeline Starts Here
WITH device_space AS (
SELECT
device.uuid AS device_id,
device.space_device_uuid AS space_id,
"device-status-log".event_time::timestamp AS event_time,
"device-status-log".code,
"device-status-log".value
FROM device
LEFT JOIN "device-status-log"
ON device.uuid = "device-status-log".device_id
LEFT JOIN product
ON product.uuid = device.product_device_uuid
WHERE product.cat_name = 'hjjcy'
),
average_pollutants AS (
SELECT
event_time::date AS event_date,
date_trunc('hour', event_time) AS event_hour,
space_id,
-- PM1
MIN(CASE WHEN code = 'pm1' THEN value::numeric END) AS pm1_min,
AVG(CASE WHEN code = 'pm1' THEN value::numeric END) AS pm1_avg,
MAX(CASE WHEN code = 'pm1' THEN value::numeric END) AS pm1_max,
-- PM25
MIN(CASE WHEN code = 'pm25_value' THEN value::numeric END) AS pm25_min,
AVG(CASE WHEN code = 'pm25_value' THEN value::numeric END) AS pm25_avg,
MAX(CASE WHEN code = 'pm25_value' THEN value::numeric END) AS pm25_max,
-- PM10
MIN(CASE WHEN code = 'pm10' THEN value::numeric END) AS pm10_min,
AVG(CASE WHEN code = 'pm10' THEN value::numeric END) AS pm10_avg,
MAX(CASE WHEN code = 'pm10' THEN value::numeric END) AS pm10_max,
-- VOC
MIN(CASE WHEN code = 'voc_value' THEN value::numeric END) AS voc_min,
AVG(CASE WHEN code = 'voc_value' THEN value::numeric END) AS voc_avg,
MAX(CASE WHEN code = 'voc_value' THEN value::numeric END) AS voc_max,
-- CH2O
MIN(CASE WHEN code = 'ch2o_value' THEN value::numeric END) AS ch2o_min,
AVG(CASE WHEN code = 'ch2o_value' THEN value::numeric END) AS ch2o_avg,
MAX(CASE WHEN code = 'ch2o_value' THEN value::numeric END) AS ch2o_max,
-- CO2
MIN(CASE WHEN code = 'co2_value' THEN value::numeric END) AS co2_min,
AVG(CASE WHEN code = 'co2_value' THEN value::numeric END) AS co2_avg,
MAX(CASE WHEN code = 'co2_value' THEN value::numeric END) AS co2_max
FROM device_space
GROUP BY space_id, event_hour, event_date
),
filled_pollutants AS (
SELECT
*,
-- AVG
COALESCE(pm25_avg, LAG(pm25_avg) OVER (PARTITION BY space_id ORDER BY event_hour)) AS pm25_avg_f,
COALESCE(pm10_avg, LAG(pm10_avg) OVER (PARTITION BY space_id ORDER BY event_hour)) AS pm10_avg_f,
COALESCE(voc_avg, LAG(voc_avg) OVER (PARTITION BY space_id ORDER BY event_hour)) AS voc_avg_f,
COALESCE(co2_avg, LAG(co2_avg) OVER (PARTITION BY space_id ORDER BY event_hour)) AS co2_avg_f,
COALESCE(ch2o_avg, LAG(ch2o_avg) OVER (PARTITION BY space_id ORDER BY event_hour)) AS ch2o_avg_f,
-- MIN
COALESCE(pm25_min, LAG(pm25_min) OVER (PARTITION BY space_id ORDER BY event_hour)) AS pm25_min_f,
COALESCE(pm10_min, LAG(pm10_min) OVER (PARTITION BY space_id ORDER BY event_hour)) AS pm10_min_f,
COALESCE(voc_min, LAG(voc_min) OVER (PARTITION BY space_id ORDER BY event_hour)) AS voc_min_f,
COALESCE(co2_min, LAG(co2_min) OVER (PARTITION BY space_id ORDER BY event_hour)) AS co2_min_f,
COALESCE(ch2o_min, LAG(ch2o_min) OVER (PARTITION BY space_id ORDER BY event_hour)) AS ch2o_min_f,
-- MAX
COALESCE(pm25_max, LAG(pm25_max) OVER (PARTITION BY space_id ORDER BY event_hour)) AS pm25_max_f,
COALESCE(pm10_max, LAG(pm10_max) OVER (PARTITION BY space_id ORDER BY event_hour)) AS pm10_max_f,
COALESCE(voc_max, LAG(voc_max) OVER (PARTITION BY space_id ORDER BY event_hour)) AS voc_max_f,
COALESCE(co2_max, LAG(co2_max) OVER (PARTITION BY space_id ORDER BY event_hour)) AS co2_max_f,
COALESCE(ch2o_max, LAG(ch2o_max) OVER (PARTITION BY space_id ORDER BY event_hour)) AS ch2o_max_f
FROM average_pollutants
),
hourly_results AS (
SELECT
space_id,
event_date,
event_hour,
pm1_min, pm1_avg, pm1_max,
pm25_min_f, pm25_avg_f, pm25_max_f,
pm10_min_f, pm10_avg_f, pm10_max_f,
voc_min_f, voc_avg_f, voc_max_f,
co2_min_f, co2_avg_f, co2_max_f,
ch2o_min_f, ch2o_avg_f, ch2o_max_f,
GREATEST(
calculate_aqi('pm25', pm25_min_f),
calculate_aqi('pm10', pm10_min_f)
) AS hourly_min_aqi,
GREATEST(
calculate_aqi('pm25', pm25_avg_f),
calculate_aqi('pm10', pm10_avg_f)
) AS hourly_avg_aqi,
GREATEST(
calculate_aqi('pm25', pm25_max_f),
calculate_aqi('pm10', pm10_max_f)
) AS hourly_max_aqi,
classify_aqi(GREATEST(
calculate_aqi('pm25', pm25_avg_f),
calculate_aqi('pm10', pm10_avg_f)
)) AS aqi_category,
classify_aqi(calculate_aqi('pm25',pm25_avg_f)) as pm25_category,
classify_aqi(calculate_aqi('pm10',pm10_avg_f)) as pm10_category,
classify_aqi(calculate_aqi('voc',voc_avg_f)) as voc_category,
classify_aqi(calculate_aqi('co2',co2_avg_f)) as co2_category,
classify_aqi(calculate_aqi('ch2o',ch2o_avg_f)) as ch2o_category
FROM filled_pollutants
),
daily_category_counts AS (
SELECT space_id, event_date, aqi_category AS category, 'aqi' AS pollutant, COUNT(*) AS category_count
FROM hourly_results
GROUP BY space_id, event_date, aqi_category
UNION ALL
SELECT space_id, event_date, pm25_category AS category, 'pm25' AS pollutant, COUNT(*) AS category_count
FROM hourly_results
GROUP BY space_id, event_date, pm25_category
UNION ALL
SELECT space_id, event_date, pm10_category AS category, 'pm10' AS pollutant, COUNT(*) AS category_count
FROM hourly_results
GROUP BY space_id, event_date, pm10_category
UNION ALL
SELECT space_id, event_date, voc_category AS category, 'voc' AS pollutant, COUNT(*) AS category_count
FROM hourly_results
GROUP BY space_id, event_date, voc_category
UNION ALL
SELECT space_id, event_date, co2_category AS category, 'co2' AS pollutant, COUNT(*) AS category_count
FROM hourly_results
GROUP BY space_id, event_date, co2_category
UNION ALL
SELECT space_id, event_date, ch2o_category AS category, 'ch2o' AS pollutant, COUNT(*) AS category_count
FROM hourly_results
GROUP BY space_id, event_date, ch2o_category
),
daily_totals AS (
SELECT
space_id,
event_date,
SUM(category_count) AS total_count
FROM daily_category_counts
where pollutant = 'aqi'
GROUP BY space_id, event_date
),
-- Pivot Categories into Columns
daily_percentages AS (
select
dt.space_id,
dt.event_date,
-- AQI CATEGORIES
ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Good' and dcc.pollutant = 'aqi' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS good_aqi_percentage,
ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Moderate' and dcc.pollutant = 'aqi' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS moderate_aqi_percentage,
ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Unhealthy for Sensitive Groups' and dcc.pollutant = 'aqi' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS unhealthy_sensitive_aqi_percentage,
ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Unhealthy' and dcc.pollutant = 'aqi' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS unhealthy_aqi_percentage,
ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Very Unhealthy' and dcc.pollutant = 'aqi' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS very_unhealthy_aqi_percentage,
ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Hazardous' and dcc.pollutant = 'aqi' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS hazardous_aqi_percentage,
-- PM25 CATEGORIES
ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Good' and dcc.pollutant = 'pm25' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS good_pm25_percentage,
ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Moderate' and dcc.pollutant = 'pm25' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS moderate_pm25_percentage,
ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Unhealthy for Sensitive Groups' and dcc.pollutant = 'pm25' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS unhealthy_sensitive_pm25_percentage,
ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Unhealthy' and dcc.pollutant = 'pm25' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS unhealthy_pm25_percentage,
ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Very Unhealthy' and dcc.pollutant = 'pm25' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS very_unhealthy_pm25_percentage,
ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Hazardous' and dcc.pollutant = 'pm25' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS hazardous_pm25_percentage,
-- PM10 CATEGORIES
ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Good' and dcc.pollutant = 'pm10' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS good_pm10_percentage,
ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Moderate' and dcc.pollutant = 'pm10' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS moderate_pm10_percentage,
ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Unhealthy for Sensitive Groups' and dcc.pollutant = 'pm10' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS unhealthy_sensitive_pm10_percentage,
ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Unhealthy' and dcc.pollutant = 'pm10' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS unhealthy_pm10_percentage,
ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Very Unhealthy' and dcc.pollutant = 'pm10' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS very_unhealthy_pm10_percentage,
ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Hazardous' and dcc.pollutant = 'pm10' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS hazardous_pm10_percentage,
-- VOC CATEGORIES
ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Good' and dcc.pollutant = 'voc' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS good_voc_percentage,
ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Moderate' and dcc.pollutant = 'voc' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS moderate_voc_percentage,
ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Unhealthy for Sensitive Groups' and dcc.pollutant = 'voc' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS unhealthy_sensitive_voc_percentage,
ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Unhealthy' and dcc.pollutant = 'voc' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS unhealthy_voc_percentage,
ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Very Unhealthy' and dcc.pollutant = 'voc' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS very_unhealthy_voc_percentage,
ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Hazardous' and dcc.pollutant = 'voc' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS hazardous_voc_percentage,
-- CO2 CATEGORIES
ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Good' and dcc.pollutant = 'co2' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS good_co2_percentage,
ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Moderate' and dcc.pollutant = 'co2' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS moderate_co2_percentage,
ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Unhealthy for Sensitive Groups' and dcc.pollutant = 'co2' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS unhealthy_sensitive_co2_percentage,
ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Unhealthy' and dcc.pollutant = 'co2' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS unhealthy_co2_percentage,
ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Very Unhealthy' and dcc.pollutant = 'co2' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS very_unhealthy_co2_percentage,
ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Hazardous' and dcc.pollutant = 'co2' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS hazardous_co2_percentage,
-- CH20 CATEGORIES
ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Good' and dcc.pollutant = 'ch2o' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS good_ch2o_percentage,
ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Moderate' and dcc.pollutant = 'ch2o' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS moderate_ch2o_percentage,
ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Unhealthy for Sensitive Groups' and dcc.pollutant = 'ch2o' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS unhealthy_sensitive_ch2o_percentage,
ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Unhealthy' and dcc.pollutant = 'ch2o' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS unhealthy_ch2o_percentage,
ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Very Unhealthy' and dcc.pollutant = 'ch2o' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS very_unhealthy_ch2o_percentage,
ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Hazardous' and dcc.pollutant = 'ch2o' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS hazardous_ch2o_percentage
FROM daily_totals dt
LEFT JOIN daily_category_counts dcc
ON dt.space_id = dcc.space_id AND dt.event_date = dcc.event_date
GROUP BY dt.space_id, dt.event_date, dt.total_count
),
daily_averages AS (
SELECT
space_id,
event_date,
-- AQI
ROUND(AVG(hourly_min_aqi)::numeric, 2) AS daily_min_aqi,
ROUND(AVG(hourly_avg_aqi)::numeric, 2) AS daily_avg_aqi,
ROUND(AVG(hourly_max_aqi)::numeric, 2) AS daily_max_aqi,
-- PM25
ROUND(AVG(pm25_min_f)::numeric, 2) AS daily_min_pm25,
ROUND(AVG(pm25_avg_f)::numeric, 2) AS daily_avg_pm25,
ROUND(AVG(pm25_max_f)::numeric, 2) AS daily_max_pm25,
-- PM10
ROUND(AVG(pm10_min_f)::numeric, 2) AS daily_min_pm10,
ROUND(AVG(pm10_avg_f)::numeric, 2) AS daily_avg_pm10,
ROUND(AVG(pm10_max_f)::numeric, 2) AS daily_max_pm10,
-- VOC
ROUND(AVG(voc_min_f)::numeric, 2) AS daily_min_voc,
ROUND(AVG(voc_avg_f)::numeric, 2) AS daily_avg_voc,
ROUND(AVG(voc_max_f)::numeric, 2) AS daily_max_voc,
-- CO2
ROUND(AVG(co2_min_f)::numeric, 2) AS daily_min_co2,
ROUND(AVG(co2_avg_f)::numeric, 2) AS daily_avg_co2,
ROUND(AVG(co2_max_f)::numeric, 2) AS daily_max_co2,
-- CH2O
ROUND(AVG(ch2o_min_f)::numeric, 2) AS daily_min_ch2o,
ROUND(AVG(ch2o_avg_f)::numeric, 2) AS daily_avg_ch2o,
ROUND(AVG(ch2o_max_f)::numeric, 2) AS daily_max_ch2o
FROM hourly_results
GROUP BY space_id, event_date
)
SELECT
p.space_id,
p.event_date,
p.good_aqi_percentage, p.moderate_aqi_percentage, p.unhealthy_sensitive_aqi_percentage, p.unhealthy_aqi_percentage, p.very_unhealthy_aqi_percentage, p.hazardous_aqi_percentage,
a.daily_avg_aqi,a.daily_max_aqi, a.daily_min_aqi,
p.good_pm25_percentage, p.moderate_pm25_percentage, p.unhealthy_sensitive_pm25_percentage, p.unhealthy_pm25_percentage, p.very_unhealthy_pm25_percentage, p.hazardous_pm25_percentage,
a.daily_avg_pm25,a.daily_max_pm25, a.daily_min_pm25,
p.good_pm10_percentage, p.moderate_pm10_percentage, p.unhealthy_sensitive_pm10_percentage, p.unhealthy_pm10_percentage, p.very_unhealthy_pm10_percentage, p.hazardous_pm10_percentage,
a.daily_avg_pm10, a.daily_max_pm10, a.daily_min_pm10,
p.good_voc_percentage, p.moderate_voc_percentage, p.unhealthy_sensitive_voc_percentage, p.unhealthy_voc_percentage, p.very_unhealthy_voc_percentage, p.hazardous_voc_percentage,
a.daily_avg_voc, a.daily_max_voc, a.daily_min_voc,
p.good_co2_percentage, p.moderate_co2_percentage, p.unhealthy_sensitive_co2_percentage, p.unhealthy_co2_percentage, p.very_unhealthy_co2_percentage, p.hazardous_co2_percentage,
a.daily_avg_co2,a.daily_max_co2, a.daily_min_co2,
p.good_ch2o_percentage, p.moderate_ch2o_percentage, p.unhealthy_sensitive_ch2o_percentage, p.unhealthy_ch2o_percentage, p.very_unhealthy_ch2o_percentage, p.hazardous_ch2o_percentage,
a.daily_avg_ch2o,a.daily_max_ch2o, a.daily_min_ch2o
FROM daily_percentages p
LEFT JOIN daily_averages a
ON p.space_id = a.space_id AND p.event_date = a.event_date
ORDER BY p.space_id, p.event_date;
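The removed pipeline buckets every hourly AQI through the classify_aqi helper defined earlier. For reference, the same banding as a TypeScript sketch, assuming integer AQI values as produced by calculate_aqi's ROUND:

// Category bands matching the SQL classify_aqi function above.
function classifyAqi(aqi: number): string | null {
  if (aqi > 300) return 'Hazardous';
  if (aqi > 200) return 'Very Unhealthy';
  if (aqi > 150) return 'Unhealthy';
  if (aqi > 100) return 'Unhealthy for Sensitive Groups';
  if (aqi > 50) return 'Moderate';
  if (aqi >= 0) return 'Good';
  return null;
}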

View File

@ -1,90 +1,91 @@
-- Step 1: Get device presence events with previous timestamps
WITH start_date AS (
    SELECT
        d.uuid AS device_id,
        d.space_device_uuid AS space_id,
        l.device_id,
        l.value,
        l.event_time::timestamp AS event_time,
        LAG(l.event_time::timestamp) OVER (PARTITION BY d.uuid ORDER BY l.event_time) AS prev_timestamp
    FROM device d
    LEFT JOIN "device-status-log" l
        ON d.uuid = l.device_id
    LEFT JOIN product p
        ON p.uuid = d.product_device_uuid
    WHERE p.cat_name = 'hps'
      AND l.code = 'presence_state'
),
-- Step 2: Identify periods when device reports "none"
device_none_periods AS (
    SELECT
        space_id,
        device_id,
        event_time AS empty_from,
        LEAD(event_time) OVER (PARTITION BY device_id ORDER BY event_time) AS empty_until
    FROM start_date
    WHERE value = 'none'
),
-- Step 3: Clip the "none" periods to the edges of each day
clipped_device_none_periods AS (
    SELECT
        space_id,
        GREATEST(empty_from, DATE_TRUNC('day', empty_from)) AS clipped_from,
        LEAST(empty_until, DATE_TRUNC('day', empty_until) + INTERVAL '1 day') AS clipped_until
    FROM device_none_periods
    WHERE empty_until IS NOT NULL
),
-- Step 4: Break multi-day periods into daily intervals
generated_daily_intervals AS (
    SELECT
        space_id,
        gs::date AS day,
        GREATEST(clipped_from, gs) AS interval_start,
        LEAST(clipped_until, gs + INTERVAL '1 day') AS interval_end
    FROM clipped_device_none_periods,
    LATERAL generate_series(DATE_TRUNC('day', clipped_from), DATE_TRUNC('day', clipped_until), INTERVAL '1 day') AS gs
),
-- Step 5: Merge overlapping or adjacent intervals per day
merged_intervals AS (
    SELECT
        space_id,
        day,
        interval_start,
        interval_end
    FROM (
        SELECT
            space_id,
            day,
            interval_start,
            interval_end,
            LAG(interval_end) OVER (PARTITION BY space_id, day ORDER BY interval_start) AS prev_end
        FROM generated_daily_intervals
    ) sub
    WHERE prev_end IS NULL OR interval_start > prev_end
),
-- Step 6: Sum up total missing seconds (device reported "none") per day
missing_seconds_per_day AS (
    SELECT
        space_id,
        day AS missing_date,
        SUM(EXTRACT(EPOCH FROM (interval_end - interval_start))) AS total_missing_seconds
    FROM merged_intervals
    GROUP BY space_id, day
),
-- Step 7: Calculate total occupied time per day (86400 - missing)
occupied_seconds_per_day AS (
    SELECT
        space_id,
        missing_date AS date,
        86400 - total_missing_seconds AS total_occupied_seconds
    FROM missing_seconds_per_day
)
-- Final Output
SELECT *
FROM occupied_seconds_per_day
ORDER BY 1, 2;
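Step 5 is labelled a merge, but the predicate keeps an interval only when it starts strictly after the end of the interval immediately before it (LAG over the same space and day). A compact TypeScript mirror of that filter, assuming intervals are pre-sorted by start; the Interval shape is illustrative:

interface Interval {
  start: number; // epoch seconds
  end: number;
}

// Keep an interval only if it starts after the end of the *previous row*,
// exactly as the SQL's LAG(interval_end) + WHERE clause does.
function filterNonOverlapping(intervals: Interval[]): Interval[] {
  return intervals.filter(
    (iv, i) => i === 0 || iv.start > intervals[i - 1].end,
  );
}

// Example: the second interval overlaps the first and is dropped.
console.log(
  filterNonOverlapping([
    { start: 0, end: 3600 },
    { start: 1800, end: 5400 },
    { start: 7200, end: 9000 },
  ]),
); // [ { start: 0, end: 3600 }, { start: 7200, end: 9000 } ]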

View File

@ -1,18 +0,0 @@
export function calculateAQI(pm2_5: number): number {
const breakpoints = [
{ pmLow: 0.0, pmHigh: 12.0, aqiLow: 0, aqiHigh: 50 },
{ pmLow: 12.1, pmHigh: 35.4, aqiLow: 51, aqiHigh: 100 },
{ pmLow: 35.5, pmHigh: 55.4, aqiLow: 101, aqiHigh: 150 },
{ pmLow: 55.5, pmHigh: 150.4, aqiLow: 151, aqiHigh: 200 },
{ pmLow: 150.5, pmHigh: 250.4, aqiLow: 201, aqiHigh: 300 },
{ pmLow: 250.5, pmHigh: 500.4, aqiLow: 301, aqiHigh: 500 },
];
const bp = breakpoints.find((b) => pm2_5 >= b.pmLow && pm2_5 <= b.pmHigh);
if (!bp) return pm2_5 > 500.4 ? 500 : 0; // Handle out-of-range values
return Math.round(
((bp.aqiHigh - bp.aqiLow) / (bp.pmHigh - bp.pmLow)) * (pm2_5 - bp.pmLow) +
bp.aqiLow,
);
}
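For reference, the removed helper interpolates a PM2.5 reading into the US EPA AQI scale; the values below are computed directly from the breakpoints shown above:

calculateAQI(10);  // 42  (0.0-12.0 band -> AQI 0-50)
calculateAQI(35);  // 99  (12.1-35.4 band -> AQI 51-100)
calculateAQI(600); // 500 (above the last breakpoint, clamped)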

View File

@ -1,11 +0,0 @@
import { DeviceEntity } from '../modules/device/entities';
export function addSpaceUuidToDevices(
devices: DeviceEntity[],
spaceUuid: string,
): DeviceEntity[] {
return devices.map((device) => {
(device as any).spaceUuid = spaceUuid;
return device;
});
}

View File

@ -1,17 +1,15 @@
import { HttpException, HttpStatus, Injectable } from '@nestjs/common';
import { ConfigService } from '@nestjs/config';
import * as nodemailer from 'nodemailer';
import axios from 'axios';
import {
  SEND_EMAIL_API_URL_DEV,
  SEND_EMAIL_API_URL_PROD,
} from '../constants/mail-trap';

@Injectable()
export class EmailService {
  private smtpConfig: any;
private API_TOKEN: string;
private SEND_EMAIL_API_URL: string;
private BATCH_EMAIL_API_URL: string;
  constructor(private readonly configService: ConfigService) {
    this.smtpConfig = {
@ -24,15 +22,6 @@ export class EmailService {
        pass: this.configService.get<string>('email-config.SMTP_PASSWORD'),
      },
    };
this.API_TOKEN = this.configService.get<string>(
'email-config.MAILTRAP_API_TOKEN',
);
this.SEND_EMAIL_API_URL = this.configService.get<string>(
'email-config.SEND_EMAIL_API_URL',
);
this.BATCH_EMAIL_API_URL = this.configService.get<string>(
'email-config.BATCH_EMAIL_API_URL',
);
  }

  async sendEmail(
@ -42,7 +31,7 @@ export class EmailService {
  ): Promise<void> {
    const transporter = nodemailer.createTransport(this.smtpConfig);

    const mailOptions = {
      from: this.smtpConfig.sender,
      to: email,
      subject,
@ -55,6 +44,13 @@ export class EmailService {
    email: string,
    emailInvitationData: any,
  ): Promise<void> {
    const isProduction = process.env.NODE_ENV === 'production';
    const API_TOKEN = this.configService.get<string>(
      'email-config.MAILTRAP_API_TOKEN',
    );
    const API_URL = isProduction
      ? SEND_EMAIL_API_URL_PROD
      : SEND_EMAIL_API_URL_DEV;
    const TEMPLATE_UUID = this.configService.get<string>(
      'email-config.MAILTRAP_INVITATION_TEMPLATE_UUID',
    );
@ -72,23 +68,35 @@ export class EmailService {
      template_variables: emailInvitationData,
    };

    try {
      await axios.post(API_URL, emailData, {
        headers: {
          Authorization: `Bearer ${API_TOKEN}`,
          'Content-Type': 'application/json',
        },
      });
    } catch (error) {
      throw new HttpException(
        error.response?.data?.message ||
          'Error sending email using Mailtrap template',
        error.response?.status || HttpStatus.INTERNAL_SERVER_ERROR,
      );
    }
  }
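Note: the refactor imports SEND_EMAIL_API_URL_DEV and SEND_EMAIL_API_URL_PROD from '../constants/mail-trap', which is not part of this section. A plausible shape for that module is sketched below; the concrete URLs are assumptions based on Mailtrap's commonly documented send endpoints and should be verified against the account setup:

// src/constants/mail-trap.ts (assumed layout; not shown in this diff)
// Placeholder endpoints; confirm before relying on this sketch.
export const SEND_EMAIL_API_URL_PROD = 'https://send.api.mailtrap.io/api/send';
export const SEND_EMAIL_API_URL_DEV =
  'https://sandbox.api.mailtrap.io/api/send/<inbox-id>';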
  async sendEmailWithTemplate(
    email: string,
    name: string,
    isEnable: boolean,
    isDelete: boolean,
  ): Promise<void> {
    const isProduction = process.env.NODE_ENV === 'production';
    const API_TOKEN = this.configService.get<string>(
      'email-config.MAILTRAP_API_TOKEN',
    );
    const API_URL = isProduction
      ? SEND_EMAIL_API_URL_PROD
      : SEND_EMAIL_API_URL_DEV;
    // Determine the template UUID based on the arguments
    const templateUuid = isDelete
      ? this.configService.get<string>(
@ -115,16 +123,32 @@
      },
    };

    try {
      await axios.post(API_URL, emailData, {
        headers: {
          Authorization: `Bearer ${API_TOKEN}`,
          'Content-Type': 'application/json',
        },
      });
    } catch (error) {
      throw new HttpException(
        error.response?.data?.message ||
          'Error sending email using Mailtrap template',
        error.response?.status || HttpStatus.INTERNAL_SERVER_ERROR,
      );
    }
  }
async sendEditUserEmailWithTemplate( async sendEditUserEmailWithTemplate(
email: string, email: string,
emailEditData: any, emailEditData: any,
): Promise<void> { ): Promise<void> {
const isProduction = process.env.NODE_ENV === 'production';
const API_TOKEN = this.configService.get<string>(
'email-config.MAILTRAP_API_TOKEN',
);
const API_URL = isProduction
? SEND_EMAIL_API_URL_PROD
: SEND_EMAIL_API_URL_DEV;
const TEMPLATE_UUID = this.configService.get<string>( const TEMPLATE_UUID = this.configService.get<string>(
'email-config.MAILTRAP_EDIT_USER_TEMPLATE_UUID', 'email-config.MAILTRAP_EDIT_USER_TEMPLATE_UUID',
); );
@ -142,15 +166,32 @@ export class EmailService {
template_variables: emailEditData, template_variables: emailEditData,
}; };
return this.sendEmailWithTemplateV2({ try {
...emailData, await axios.post(API_URL, emailData, {
isBatch: false, headers: {
Authorization: `Bearer ${API_TOKEN}`,
'Content-Type': 'application/json',
},
}); });
} catch (error) {
throw new HttpException(
error.response?.data?.message ||
'Error sending email using Mailtrap template',
error.response?.status || HttpStatus.INTERNAL_SERVER_ERROR,
);
}
} }
async sendOtpEmailWithTemplate( async sendOtpEmailWithTemplate(
email: string, email: string,
emailEditData: any, emailEditData: any,
): Promise<void> { ): Promise<void> {
const isProduction = process.env.NODE_ENV === 'production';
const API_TOKEN = this.configService.get<string>(
'email-config.MAILTRAP_API_TOKEN',
);
const API_URL = isProduction
? SEND_EMAIL_API_URL_PROD
: SEND_EMAIL_API_URL_DEV;
const TEMPLATE_UUID = this.configService.get<string>( const TEMPLATE_UUID = this.configService.get<string>(
'email-config.MAILTRAP_SEND_OTP_TEMPLATE_UUID', 'email-config.MAILTRAP_SEND_OTP_TEMPLATE_UUID',
); );
@ -168,84 +209,20 @@ export class EmailService {
template_variables: emailEditData, template_variables: emailEditData,
}; };
return this.sendEmailWithTemplateV2({ try {
...emailData, await axios.post(API_URL, emailData, {
isBatch: false, headers: {
}); Authorization: `Bearer ${API_TOKEN}`,
} 'Content-Type': 'application/json',
async sendUpdateBookingTimingEmailWithTemplate(
emails: {
email: string;
name: string;
bookings: {
date: string;
start_time: string;
end_time: string;
}[];
}[],
emailVariables: {
space_name: string;
days: string;
start_time: string;
end_time: string;
}, },
): Promise<void> { });
const TEMPLATE_UUID = this.configService.get<string>( } catch (error) {
'email-config.MAILTRAP_SEND_BOOKING_TIMING_UPDATE_TEMPLATE_UUID', throw new HttpException(
error.response?.data?.message ||
'Error sending email using Mailtrap template',
error.response?.status || HttpStatus.INTERNAL_SERVER_ERROR,
); );
const emailData = {
base: {
from: {
email: this.smtpConfig.sender,
},
template_uuid: TEMPLATE_UUID,
},
requests: emails.map(({ email, name, bookings }) => ({
to: [{ email }],
template_variables: { ...emailVariables, name, bookings },
})),
};
return this.sendEmailWithTemplateV2({
...emailData,
isBatch: true,
});
} }
async sendUpdateBookingAvailabilityEmailWithTemplate(
emails: { email: string; name: string }[],
emailVariables: {
space_name: string;
availability: string;
isAvailable: boolean;
},
): Promise<void> {
const TEMPLATE_UUID = this.configService.get<string>(
'email-config.MAILTRAP_SEND_BOOKING_AVAILABILITY_UPDATE_TEMPLATE_UUID',
);
const emailData = {
base: {
from: {
email: this.smtpConfig.sender,
},
template_uuid: TEMPLATE_UUID,
},
requests: emails.map(({ email, name }) => ({
to: [{ email }],
template_variables: {
...emailVariables,
name,
},
})),
};
return this.sendEmailWithTemplateV2({
...emailData,
isBatch: true,
});
} }
generateUserChangesEmailBody( generateUserChangesEmailBody(
addedSpaceNames: string[], addedSpaceNames: string[],
@ -282,30 +259,4 @@ export class EmailService {
nameChanged, nameChanged,
}; };
} }
private async sendEmailWithTemplateV2({
isBatch,
...emailData
}: BatchEmailData | SingleEmailData): Promise<void> {
try {
await axios.post(
isBatch ? this.BATCH_EMAIL_API_URL : this.SEND_EMAIL_API_URL,
{
...emailData,
},
{
headers: {
Authorization: `Bearer ${this.API_TOKEN}`,
'Content-Type': 'application/json',
},
},
);
} catch (error) {
throw new HttpException(
error.response?.data?.message ||
'Error sending email using Mailtrap template',
error.response?.status || HttpStatus.INTERNAL_SERVER_ERROR,
);
}
}
} }


@@ -1,8 +0,0 @@
-export interface BatchEmailData {
-  base: { from: { email: string }; template_uuid: string };
-  requests: Array<{
-    to: { email: string }[];
-    template_variables: Record<string, any>;
-  }>;
-  isBatch: true;
-}


@@ -1,7 +0,0 @@
-export interface SingleEmailData {
-  from: { email: string };
-  to: { email: string }[];
-  template_uuid: string;
-  template_variables?: Record<string, any>;
-  isBatch: false;
-}


@@ -1,7 +0,0 @@
-import { format, parse } from 'date-fns';
-
-export function to12HourFormat(timeString: string): string {
-  timeString = timeString.padEnd(8, ':00');
-  const parsedTime = parse(timeString, 'HH:mm:ss', new Date());
-  return format(parsedTime, 'hh:mm a');
-}

package-lock.json (generated, 3988 changed lines): diff suppressed because it is too large.


@@ -10,7 +10,6 @@
     "format": "prettier --write \"apps/**/*.ts\" \"libs/**/*.ts\"",
     "start": "npm run test && node dist/main",
     "start:dev": "npm run test && npx nest start --watch",
-    "dev": "npx nest start --watch",
     "start:debug": "npm run test && npx nest start --debug --watch",
     "start:prod": "npm run test && node dist/main",
     "lint": "eslint \"{src,apps,libs,test}/**/*.ts\" --fix",
@@ -30,7 +29,6 @@
     "@nestjs/jwt": "^10.2.0",
     "@nestjs/passport": "^10.0.3",
     "@nestjs/platform-express": "^10.0.0",
-    "@nestjs/schedule": "^6.0.0",
     "@nestjs/swagger": "^7.3.0",
     "@nestjs/terminus": "^11.0.0",
     "@nestjs/throttler": "^6.4.0",
@@ -44,7 +42,6 @@
     "class-validator": "^0.14.1",
     "crypto-js": "^4.2.0",
     "csv-parser": "^3.2.0",
-    "date-fns": "^4.1.0",
     "express-rate-limit": "^7.1.5",
     "firebase": "^10.12.5",
     "google-auth-library": "^9.14.1",
@@ -52,12 +49,11 @@
     "ioredis": "^5.3.2",
     "morgan": "^1.10.0",
     "nest-winston": "^1.10.2",
-    "node-cache": "^5.1.2",
-    "nodemailer": "^7.0.5",
+    "nodemailer": "^6.9.10",
     "onesignal-node": "^3.4.0",
     "passport-jwt": "^4.0.1",
     "pg": "^8.11.3",
-    "reflect-metadata": "^0.2.2",
+    "reflect-metadata": "^0.2.0",
     "rxjs": "^7.8.1",
     "typeorm": "^0.3.20",
     "winston": "^3.17.0",
@@ -73,7 +69,6 @@
     "@types/jest": "^29.5.2",
     "@types/multer": "^1.4.12",
     "@types/node": "^20.3.1",
-    "@types/nodemailer": "^6.4.17",
     "@types/supertest": "^6.0.0",
     "@typescript-eslint/eslint-plugin": "^6.0.0",
     "@typescript-eslint/parser": "^6.0.0",
@@ -91,9 +86,5 @@
     "ts-node": "^10.9.1",
     "tsconfig-paths": "^4.2.0",
     "typescript": "^5.1.3"
-  },
-  "engines": {
-    "node": "20.x",
-    "npm": "10.x"
   }
 }


@@ -1,67 +1,51 @@
-import { SeederModule } from '@app/common/seed/seeder.module';
 import { Module } from '@nestjs/common';
 import { ConfigModule } from '@nestjs/config';
-import { APP_GUARD, APP_INTERCEPTOR } from '@nestjs/core';
-import { WinstonModule } from 'nest-winston';
-import { AuthenticationModule } from './auth/auth.module';
-import { AutomationModule } from './automation/automation.module';
-import { ClientModule } from './client/client.module';
-import { DeviceCommissionModule } from './commission-device/commission-device.module';
-import { CommunityModule } from './community/community.module';
 import config from './config';
-import { DeviceMessagesSubscriptionModule } from './device-messages/device-messages.module';
-import { DeviceModule } from './device/device.module';
-import { DoorLockModule } from './door-lock/door.lock.module';
+import { AuthenticationModule } from './auth/auth.module';
+import { UserModule } from './users/user.module';
 import { GroupModule } from './group/group.module';
-import { HealthModule } from './health/health.module';
+import { DeviceModule } from './device/device.module';
+import { UserDevicePermissionModule } from './user-device-permission/user-device-permission.module';
+import { CommunityModule } from './community/community.module';
+import { SeederModule } from '@app/common/seed/seeder.module';
+import { UserNotificationModule } from './user-notification/user-notification.module';
+import { DeviceMessagesSubscriptionModule } from './device-messages/device-messages.module';
+import { SceneModule } from './scene/scene.module';
+import { DoorLockModule } from './door-lock/door.lock.module';
+import { APP_GUARD, APP_INTERCEPTOR } from '@nestjs/core';
 import { LoggingInterceptor } from './interceptors/logging.interceptor';
-import { InviteUserModule } from './invite-user/invite-user.module';
-import { PermissionModule } from './permission/permission.module';
-import { PowerClampModule } from './power-clamp/power-clamp.module';
-import { PrivacyPolicyModule } from './privacy-policy/privacy-policy.module';
+import { AutomationModule } from './automation/automation.module';
+import { RegionModule } from './region/region.module';
+import { TimeZoneModule } from './timezone/timezone.module';
+import { VisitorPasswordModule } from './vistor-password/visitor-password.module';
+import { ScheduleModule } from './schedule/schedule.module';
+import { SpaceModule } from './space/space.module';
 import { ProductModule } from './product';
 import { ProjectModule } from './project';
-import { RegionModule } from './region/region.module';
-import { RoleModule } from './role/role.module';
-import { SceneModule } from './scene/scene.module';
-import { ScheduleModule } from './schedule/schedule.module';
 import { SpaceModelModule } from './space-model';
-import { SpaceModule } from './space/space.module';
-import { TagModule } from './tags/tags.module';
+import { InviteUserModule } from './invite-user/invite-user.module';
+import { PermissionModule } from './permission/permission.module';
+import { RoleModule } from './role/role.module';
 import { TermsConditionsModule } from './terms-conditions/terms-conditions.module';
-import { TimeZoneModule } from './timezone/timezone.module';
-import { UserDevicePermissionModule } from './user-device-permission/user-device-permission.module';
-import { UserNotificationModule } from './user-notification/user-notification.module';
-import { UserModule } from './users/user.module';
-import { VisitorPasswordModule } from './vistor-password/visitor-password.module';
-import { ThrottlerGuard, ThrottlerModule } from '@nestjs/throttler';
-import { isArray } from 'class-validator';
+import { PrivacyPolicyModule } from './privacy-policy/privacy-policy.module';
+import { TagModule } from './tags/tags.module';
+import { ClientModule } from './client/client.module';
+import { DeviceCommissionModule } from './commission-device/commission-device.module';
+import { PowerClampModule } from './power-clamp/power-clamp.module';
+import { WinstonModule } from 'nest-winston';
+import { HealthModule } from './health/health.module';
+import { ThrottlerGuard } from '@nestjs/throttler';
+import { ThrottlerModule } from '@nestjs/throttler/dist/throttler.module';
 import { winstonLoggerOptions } from '../libs/common/src/logger/services/winston.logger';
-import { AqiModule } from './aqi/aqi.module';
 import { OccupancyModule } from './occupancy/occupancy.module';
-import { WeatherModule } from './weather/weather.module';
-import { ScheduleModule as NestScheduleModule } from '@nestjs/schedule';
-import { SchedulerModule } from './scheduler/scheduler.module';
-import { BookingModule } from './booking';

 @Module({
   imports: [
     ConfigModule.forRoot({
       load: config,
     }),
-    ThrottlerModule.forRoot({
-      throttlers: [{ ttl: 60000, limit: 100 }],
-      generateKey: (context) => {
-        const req = context.switchToHttp().getRequest();
-        console.log('Real IP:', req.headers['x-forwarded-for']);
-        return req.headers['x-forwarded-for']
-          ? isArray(req.headers['x-forwarded-for'])
-            ? req.headers['x-forwarded-for'][0].split(':')[0]
-            : req.headers['x-forwarded-for'].split(':')[0]
-          : req.ip;
-      },
-    }),
+    /* ThrottlerModule.forRoot({
+      throttlers: [{ ttl: 100000, limit: 30 }],
+    }), */
     WinstonModule.forRoot(winstonLoggerOptions),
     ClientModule,
     AuthenticationModule,
@@ -95,21 +79,16 @@ import { BookingModule } from './booking';
     PowerClampModule,
     HealthModule,
     OccupancyModule,
-    WeatherModule,
-    AqiModule,
-    SchedulerModule,
-    NestScheduleModule.forRoot(),
-    BookingModule,
   ],
   providers: [
     {
       provide: APP_INTERCEPTOR,
       useClass: LoggingInterceptor,
     },
-    {
+    /* {
       provide: APP_GUARD,
       useClass: ThrottlerGuard,
-    },
+    }, */
   ],
 })
 export class AppModule {}


@@ -1,11 +0,0 @@
-import { Module } from '@nestjs/common';
-import { ConfigModule } from '@nestjs/config';
-import { SqlLoaderService } from '@app/common/helper/services/sql-loader.service';
-import { AqiService } from './services';
-import { AqiController } from './controllers';
-@Module({
-  imports: [ConfigModule],
-  controllers: [AqiController],
-  providers: [AqiService, SqlLoaderService],
-})
-export class AqiModule {}


@@ -1,64 +0,0 @@
-import { Controller, Get, Param, Query, UseGuards } from '@nestjs/common';
-import {
-  ApiTags,
-  ApiBearerAuth,
-  ApiOperation,
-  ApiParam,
-} from '@nestjs/swagger';
-import { EnableDisableStatusEnum } from '@app/common/constants/days.enum';
-import { ControllerRoute } from '@app/common/constants/controller-route';
-import { JwtAuthGuard } from '@app/common/guards/jwt.auth.guard';
-import { AqiService } from '../services/aqi.service';
-import {
-  GetAqiDailyBySpaceDto,
-  GetAqiPollutantBySpaceDto,
-} from '../dto/get-aqi.dto';
-import { BaseResponseDto } from '@app/common/dto/base.response.dto';
-import { SpaceParamsDto } from '../dto/aqi-params.dto';
-@ApiTags('AQI Module')
-@Controller({
-  version: EnableDisableStatusEnum.ENABLED,
-  path: ControllerRoute.AQI.ROUTE,
-})
-export class AqiController {
-  constructor(private readonly aqiService: AqiService) {}
-
-  @ApiBearerAuth()
-  @UseGuards(JwtAuthGuard)
-  @Get('range/space/:spaceUuid')
-  @ApiOperation({
-    summary: ControllerRoute.AQI.ACTIONS.GET_AQI_RANGE_DATA_SUMMARY,
-    description: ControllerRoute.AQI.ACTIONS.GET_AQI_RANGE_DATA_DESCRIPTION,
-  })
-  @ApiParam({
-    name: 'spaceUuid',
-    description: 'UUID of the Space',
-    required: true,
-  })
-  async getAQIRangeDataBySpace(
-    @Param() params: SpaceParamsDto,
-    @Query() query: GetAqiDailyBySpaceDto,
-  ): Promise<BaseResponseDto> {
-    return await this.aqiService.getAQIRangeDataBySpace(params, query);
-  }
-
-  @ApiBearerAuth()
-  @UseGuards(JwtAuthGuard)
-  @Get('distribution/space/:spaceUuid')
-  @ApiOperation({
-    summary: ControllerRoute.AQI.ACTIONS.GET_AQI_DISTRIBUTION_DATA_SUMMARY,
-    description:
-      ControllerRoute.AQI.ACTIONS.GET_AQI_DISTRIBUTION_DATA_DESCRIPTION,
-  })
-  @ApiParam({
-    name: 'spaceUuid',
-    description: 'UUID of the Space',
-    required: true,
-  })
-  async getAQIDistributionDataBySpace(
-    @Param() params: SpaceParamsDto,
-    @Query() query: GetAqiPollutantBySpaceDto,
-  ): Promise<BaseResponseDto> {
-    return await this.aqiService.getAQIDistributionDataBySpace(params, query);
-  }
-}


@@ -1 +0,0 @@
-export * from './aqi.controller';


@@ -1,7 +0,0 @@
-import { IsNotEmpty, IsUUID } from 'class-validator';
-
-export class SpaceParamsDto {
-  @IsUUID('4', { message: 'Invalid UUID format' })
-  @IsNotEmpty()
-  spaceUuid: string;
-}


@@ -1,37 +0,0 @@
-import { PollutantType } from '@app/common/constants/pollutants.enum';
-import { ApiProperty } from '@nestjs/swagger';
-import { Matches, IsNotEmpty, IsString } from 'class-validator';
-
-export class GetAqiDailyBySpaceDto {
-  @ApiProperty({
-    description: 'Month and year in format YYYY-MM',
-    example: '2025-03',
-    required: true,
-  })
-  @Matches(/^\d{4}-(0[1-9]|1[0-2])$/, {
-    message: 'monthDate must be in YYYY-MM format',
-  })
-  @IsNotEmpty()
-  monthDate: string;
-}
-export class GetAqiPollutantBySpaceDto {
-  @ApiProperty({
-    description: 'Pollutant Type',
-    enum: PollutantType,
-    example: PollutantType.AQI,
-    required: true,
-  })
-  @IsString()
-  @IsNotEmpty()
-  public pollutantType: string;
-  @ApiProperty({
-    description: 'Month and year in format YYYY-MM',
-    example: '2025-03',
-    required: true,
-  })
-  @Matches(/^\d{4}-(0[1-9]|1[0-2])$/, {
-    message: 'monthDate must be in YYYY-MM format',
-  })
-  @IsNotEmpty()
-  monthDate: string;
-}


@@ -1,138 +0,0 @@
-import { HttpException, HttpStatus, Injectable } from '@nestjs/common';
-import {
-  GetAqiDailyBySpaceDto,
-  GetAqiPollutantBySpaceDto,
-} from '../dto/get-aqi.dto';
-import { SuccessResponseDto } from '@app/common/dto/success.response.dto';
-import { SpaceParamsDto } from '../dto/aqi-params.dto';
-import { SqlLoaderService } from '@app/common/helper/services/sql-loader.service';
-import { DataSource } from 'typeorm';
-import { SQL_PROCEDURES_PATH } from '@app/common/constants/sql-query-path';
-import { BaseResponseDto } from '@app/common/dto/base.response.dto';
-import { convertKeysToCamelCase } from '@app/common/helper/camelCaseConverter';
-import { PollutantType } from '@app/common/constants/pollutants.enum';
-
-@Injectable()
-export class AqiService {
-  constructor(
-    private readonly sqlLoader: SqlLoaderService,
-    private readonly dataSource: DataSource,
-  ) {}
-
-  async getAQIDistributionDataBySpace(
-    params: SpaceParamsDto,
-    query: GetAqiPollutantBySpaceDto,
-  ): Promise<BaseResponseDto> {
-    const { monthDate, pollutantType } = query;
-    const { spaceUuid } = params;
-
-    try {
-      const data = await this.executeProcedure(
-        'fact_daily_space_aqi',
-        'proceduce_select_daily_space_aqi',
-        [spaceUuid, monthDate],
-      );
-
-      const categories = [
-        'good',
-        'moderate',
-        'unhealthy_sensitive',
-        'unhealthy',
-        'very_unhealthy',
-        'hazardous',
-      ];
-
-      const transformedData = data.map((item) => {
-        const date = new Date(item.event_date).toLocaleDateString('en-CA'); // YYYY-MM-DD
-        const categoryData = categories.map((category) => {
-          const key = `${category}_${pollutantType.toLowerCase()}_percentage`;
-          return {
-            type: category,
-            percentage: item[key] ?? 0,
-          };
-        });
-        return { date, data: categoryData };
-      });
-
-      const response = this.buildResponse(
-        `AQI distribution data fetched successfully for ${spaceUuid} space and pollutant ${pollutantType}`,
-        transformedData,
-      );
-      return response;
-    } catch (error) {
-      console.error('Failed to fetch AQI distribution data', {
-        error,
-        spaceUuid,
-      });
-      throw new HttpException(
-        error.response?.message || 'Failed to fetch AQI distribution data',
-        error.status || HttpStatus.INTERNAL_SERVER_ERROR,
-      );
-    }
-  }
-
-  async getAQIRangeDataBySpace(
-    params: SpaceParamsDto,
-    query: GetAqiDailyBySpaceDto,
-  ): Promise<BaseResponseDto> {
-    const { monthDate } = query;
-    const { spaceUuid } = params;
-
-    try {
-      const data = await this.executeProcedure(
-        'fact_daily_space_aqi',
-        'proceduce_select_daily_space_aqi',
-        [spaceUuid, monthDate],
-      );
-
-      // Define pollutants dynamically
-      const pollutants = Object.values(PollutantType);
-
-      const transformedData = data.map((item) => {
-        const date = new Date(item.event_date).toLocaleDateString('en-CA'); // YYYY-MM-DD
-        const dailyData = pollutants.map((type) => ({
-          type,
-          min: item[`daily_min_${type}`],
-          max: item[`daily_max_${type}`],
-          average: item[`daily_avg_${type}`],
-        }));
-        return { date, data: dailyData };
-      });
-
-      const response = this.buildResponse(
-        `AQI data fetched successfully for ${spaceUuid} space`,
-        transformedData,
-      );
-      return convertKeysToCamelCase(response);
-    } catch (error) {
-      console.error('Failed to fetch AQI data', {
-        error,
-        spaceUuid,
-      });
-      throw new HttpException(
-        error.response?.message || 'Failed to fetch AQI data',
-        error.status || HttpStatus.INTERNAL_SERVER_ERROR,
-      );
-    }
-  }
-
-  private buildResponse(message: string, data: any[]) {
-    return new SuccessResponseDto({
-      message,
-      data,
-      statusCode: HttpStatus.OK,
-    });
-  }
-
-  private async executeProcedure(
-    procedureFolderName: string,
-    procedureFileName: string,
-    params: (string | number | null)[],
-  ): Promise<any[]> {
-    const query = this.loadQuery(procedureFolderName, procedureFileName);
-    return await this.dataSource.query(query, params);
-  }
-
-  private loadQuery(folderName: string, fileName: string): string {
-    return this.sqlLoader.loadQuery(folderName, fileName, SQL_PROCEDURES_PATH);
-  }
-}

Some files were not shown because too many files have changed in this diff.