Compare commits

..

1 Commit

SHA1        Message                                     Date
478c4ca498  device model fix timestamp and null pm25    2025-05-27 22:44:47 -04:00
201 changed files with 9902 additions and 11418 deletions

View File

@ -21,7 +21,6 @@ module.exports = {
'@typescript-eslint/explicit-function-return-type': 'off',
'@typescript-eslint/explicit-module-boundary-types': 'off',
'@typescript-eslint/no-explicit-any': 'off',
"@typescript-eslint/no-unused-vars": 'warn',
},
settings: {
'import/resolver': {

View File

@ -1,17 +0,0 @@
<!--
Thanks for contributing!
Provide a description of your changes below and a general summary in the title.
-->
## Jira Ticket
[SP-0000](https://syncrow.atlassian.net/browse/SP-0000)
## Description
<!--- Describe your changes in detail -->
## How to Test
<!--- Describe the created APIs / Logic -->

View File

@ -1,7 +1,4 @@
# Docs for the Azure Web Apps Deploy action: https://github.com/Azure/webapps-deploy
# More GitHub Actions for Azure: https://github.com/Azure/actions
name: Build and deploy container app to Azure Web App - syncrow(staging)
name: Backend deployment to Azure App Service
on:
push:
@ -9,43 +6,50 @@ on:
- main
workflow_dispatch:
env:
AZURE_WEB_APP_NAME: 'syncrow'
AZURE_WEB_APP_SLOT_NAME: 'staging'
ACR_REGISTRY: 'syncrow.azurecr.io'
IMAGE_NAME: 'backend'
IMAGE_TAG: 'latest'
jobs:
build:
runs-on: 'ubuntu-latest'
steps:
- uses: actions/checkout@v2
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v2
- name: Log in to registry
uses: docker/login-action@v2
with:
registry: https://syncrow.azurecr.io/
username: ${{ secrets.AzureAppService_ContainerUsername_47395803300340b49931ea82f6d80be3 }}
password: ${{ secrets.AzureAppService_ContainerPassword_e7b0ff54f54d44cba04a970a22384848 }}
- name: Build and push container image to registry
uses: docker/build-push-action@v3
with:
push: true
tags: syncrow.azurecr.io/${{ secrets.AzureAppService_ContainerUsername_47395803300340b49931ea82f6d80be3 }}/syncrow/backend:${{ github.sha }}
file: ./Dockerfile
deploy:
build_and_deploy:
runs-on: ubuntu-latest
needs: build
environment:
name: 'staging'
url: ${{ steps.deploy-to-webapp.outputs.webapp-url }}
steps:
- name: Deploy to Azure Web App
id: deploy-to-webapp
uses: azure/webapps-deploy@v2
- uses: actions/checkout@v4
- name: Set up Node.js
uses: actions/setup-node@v3
with:
app-name: 'syncrow'
slot-name: 'staging'
publish-profile: ${{ secrets.AzureAppService_PublishProfile_44f7766441ec4796b74789e9761ef589 }}
images: 'syncrow.azurecr.io/${{ secrets.AzureAppService_ContainerUsername_47395803300340b49931ea82f6d80be3 }}/syncrow/backend:${{ github.sha }}'
node-version: '20'
- name: Install dependencies and build project
run: |
npm install
npm run build
- name: Log in to Azure
uses: azure/login@v1
with:
creds: ${{ secrets.AZURE_CREDENTIALS }}
- name: Log in to Azure Container Registry
run: az acr login --name ${{ env.ACR_REGISTRY }}
- name: List build output
run: ls -R dist/
- name: Build and push Docker image
run: |
docker build . -t ${{ env.ACR_REGISTRY }}/${{ env.IMAGE_NAME }}:${{ env.IMAGE_TAG }}
docker push ${{ env.ACR_REGISTRY }}/${{ env.IMAGE_NAME }}:${{ env.IMAGE_TAG }}
- name: Set Web App with Docker container
run: |
az webapp config container set \
--name ${{ env.AZURE_WEB_APP_NAME }} \
--resource-group backend \
--docker-custom-image-name ${{ env.ACR_REGISTRY }}/${{ env.IMAGE_NAME }}:${{ env.IMAGE_TAG }} \
--docker-registry-server-url https://${{ env.ACR_REGISTRY }}

View File

@ -1,73 +0,0 @@
# Docs for the Azure Web Apps Deploy action: https://github.com/Azure/webapps-deploy
# More GitHub Actions for Azure: https://github.com/Azure/actions
name: Build and deploy Node.js app to Azure Web App - syncrow
on:
push:
branches:
- main
workflow_dispatch:
jobs:
build:
runs-on: ubuntu-latest
permissions:
contents: read #This is required for actions/checkout
steps:
- uses: actions/checkout@v4
- name: Set up Node.js version
uses: actions/setup-node@v3
with:
node-version: '20.x'
- name: npm install, build, and test
run: |
npm install
npm run build --if-present
npm run test --if-present
- name: Zip artifact for deployment
run: zip release.zip ./* -r
- name: Upload artifact for deployment job
uses: actions/upload-artifact@v4
with:
name: node-app
path: release.zip
deploy:
runs-on: ubuntu-latest
needs: build
environment:
name: 'stg'
url: ${{ steps.deploy-to-webapp.outputs.webapp-url }}
permissions:
id-token: write #This is required for requesting the JWT
contents: read #This is required for actions/checkout
steps:
- name: Download artifact from build job
uses: actions/download-artifact@v4
with:
name: node-app
- name: Unzip artifact for deployment
run: unzip release.zip
- name: Login to Azure
uses: azure/login@v2
with:
client-id: ${{ secrets.AZUREAPPSERVICE_CLIENTID_515C8E782CFF431AB20448C85CA0FE58 }}
tenant-id: ${{ secrets.AZUREAPPSERVICE_TENANTID_2AEFE5534424490387C08FAE41573CC2 }}
subscription-id: ${{ secrets.AZUREAPPSERVICE_SUBSCRIPTIONID_00623C33023749FEA5F6BC36884F9C8A }}
- name: 'Deploy to Azure Web App'
id: deploy-to-webapp
uses: azure/webapps-deploy@v3
with:
app-name: 'syncrow'
slot-name: 'stg'
package: .

.gitignore vendored (2 changed lines)
View File

@ -4,7 +4,7 @@
/build
#github
/.github/workflows
/.github
# Logs
logs

View File

@ -1,19 +1,18 @@
import { PlatformType } from '@app/common/constants/platform-type.enum';
import { RoleType } from '@app/common/constants/role.type.enum';
import { UserEntity } from '@app/common/modules/user/entities';
import {
BadRequestException,
Injectable,
UnauthorizedException,
} from '@nestjs/common';
import { ConfigService } from '@nestjs/config';
import { JwtService } from '@nestjs/jwt';
import * as argon2 from 'argon2';
import { OAuth2Client } from 'google-auth-library';
import { UserSessionEntity } from '../../../../common/src/modules/session/entities';
import { UserSessionRepository } from '../../../../common/src/modules/session/repositories/session.repository';
import { UserRepository } from '../../../../common/src/modules/user/repositories';
import { HelperHashService } from '../../helper/services';
import { UserRepository } from '../../../../common/src/modules/user/repositories';
import { UserSessionRepository } from '../../../../common/src/modules/session/repositories/session.repository';
import { UserSessionEntity } from '../../../../common/src/modules/session/entities';
import { ConfigService } from '@nestjs/config';
import { OAuth2Client } from 'google-auth-library';
import { PlatformType } from '@app/common/constants/platform-type.enum';
import { RoleType } from '@app/common/constants/role.type.enum';
@Injectable()
export class AuthService {
@ -33,7 +32,7 @@ export class AuthService {
pass: string,
regionUuid?: string,
platform?: PlatformType,
): Promise<Omit<UserEntity, 'password'>> {
): Promise<any> {
const user = await this.userRepository.findOne({
where: {
email,
@ -41,17 +40,16 @@ export class AuthService {
},
relations: ['roleType', 'project'],
});
if (!user) {
throw new BadRequestException('Invalid credentials');
}
if (
platform === PlatformType.WEB &&
[RoleType.SPACE_OWNER, RoleType.SPACE_MEMBER].includes(
user.roleType.type as RoleType,
)
(user.roleType.type === RoleType.SPACE_OWNER ||
user.roleType.type === RoleType.SPACE_MEMBER)
) {
throw new UnauthorizedException('Access denied for web platform');
}
if (!user) {
throw new BadRequestException('Invalid credentials');
}
if (!user.isUserVerified) {
throw new BadRequestException('User is not verified');
@ -71,9 +69,8 @@ export class AuthService {
}
// eslint-disable-next-line @typescript-eslint/no-unused-vars
// const { password, ...result } = user;
delete user.password;
return user;
const { password, ...result } = user;
return result;
}
async createSession(data): Promise<UserSessionEntity> {
@ -116,7 +113,6 @@ export class AuthService {
hasAcceptedWebAgreement: user.hasAcceptedWebAgreement,
hasAcceptedAppAgreement: user.hasAcceptedAppAgreement,
project: user?.project,
bookingPoints: user?.bookingPoints,
};
if (payload.googleCode) {
const profile = await this.getProfile(payload.googleCode);

View File

@ -10,8 +10,6 @@ export default registerAs(
SMTP_USER: process.env.SMTP_USER,
SMTP_SENDER: process.env.SMTP_SENDER,
SMTP_PASSWORD: process.env.SMTP_PASSWORD,
BATCH_EMAIL_API_URL: process.env.BATCH_EMAIL_API_URL,
SEND_EMAIL_API_URL: process.env.SEND_EMAIL_API_URL,
MAILTRAP_API_TOKEN: process.env.MAILTRAP_API_TOKEN,
MAILTRAP_INVITATION_TEMPLATE_UUID:
process.env.MAILTRAP_INVITATION_TEMPLATE_UUID,
@ -23,9 +21,5 @@ export default registerAs(
process.env.MAILTRAP_EDIT_USER_TEMPLATE_UUID,
MAILTRAP_SEND_OTP_TEMPLATE_UUID:
process.env.MAILTRAP_SEND_OTP_TEMPLATE_UUID,
MAILTRAP_SEND_BOOKING_AVAILABILITY_UPDATE_TEMPLATE_UUID:
process.env.MAILTRAP_SEND_BOOKING_AVAILABILITY_UPDATE_TEMPLATE_UUID,
MAILTRAP_SEND_BOOKING_TIMING_UPDATE_TEMPLATE_UUID:
process.env.MAILTRAP_SEND_BOOKING_TIMING_UPDATE_TEMPLATE_UUID,
}),
);

View File

@ -69,47 +69,7 @@ export class ControllerRoute {
'Retrieve the list of all regions registered in Syncrow.';
};
};
static BOOKABLE_SPACES = class {
public static readonly ROUTE = 'bookable-spaces';
static ACTIONS = class {
public static readonly ADD_BOOKABLE_SPACES_SUMMARY =
'Add new bookable spaces';
public static readonly ADD_BOOKABLE_SPACES_DESCRIPTION =
'This endpoint allows you to add new bookable spaces by providing the required details.';
public static readonly GET_ALL_BOOKABLE_SPACES_SUMMARY =
'Get all bookable spaces';
public static readonly GET_ALL_BOOKABLE_SPACES_DESCRIPTION =
'This endpoint retrieves all bookable spaces.';
public static readonly UPDATE_BOOKABLE_SPACES_SUMMARY =
'Update existing bookable spaces';
public static readonly UPDATE_BOOKABLE_SPACES_DESCRIPTION =
'This endpoint allows you to update existing bookable spaces by providing the required details.';
};
};
static BOOKING = class {
public static readonly ROUTE = 'bookings';
static ACTIONS = class {
public static readonly ADD_BOOKING_SUMMARY = 'Add new booking';
public static readonly ADD_BOOKING_DESCRIPTION =
'This endpoint allows you to add new booking by providing the required details.';
public static readonly GET_ALL_BOOKINGS_SUMMARY = 'Get all bookings';
public static readonly GET_ALL_BOOKINGS_DESCRIPTION =
'This endpoint retrieves all bookings.';
public static readonly GET_MY_BOOKINGS_SUMMARY = 'Get my bookings';
public static readonly GET_MY_BOOKINGS_DESCRIPTION =
'This endpoint retrieves all bookings for the authenticated user.';
};
};
static COMMUNITY = class {
public static readonly ROUTE = '/projects/:projectUuid/communities';
static ACTIONS = class {
@ -239,11 +199,6 @@ export class ControllerRoute {
public static readonly UPDATE_SPACE_DESCRIPTION =
'Updates a space by its UUID and community ID. You can update the name, parent space, and other properties. If a parent space is provided and not already a parent, its `isParent` flag will be set to true.';
public static readonly UPDATE_CHILDREN_SPACES_ORDER_OF_A_SPACE_SUMMARY =
'Update the order of child spaces under a specific parent space';
public static readonly UPDATE_CHILDREN_SPACES_ORDER_OF_A_SPACE_DESCRIPTION =
'Updates the order of child spaces under a specific parent space. You can provide a new order for the child spaces.';
public static readonly GET_HEIRARCHY_SUMMARY = 'Get space hierarchy';
public static readonly GET_HEIRARCHY_DESCRIPTION =
'This endpoint retrieves the hierarchical structure of spaces under a given space ID. It returns all the child spaces nested within the specified space, organized by their parent-child relationships. ';
@ -442,11 +397,6 @@ export class ControllerRoute {
public static readonly DELETE_USER_SUMMARY = 'Delete user by UUID';
public static readonly DELETE_USER_DESCRIPTION =
'This endpoint deletes a user identified by their UUID. Accessible only by users with the Super Admin role.';
public static readonly DELETE_USER_PROFILE_SUMMARY =
'Delete user profile by UUID';
public static readonly DELETE_USER_PROFILE_DESCRIPTION =
'This endpoint deletes a user profile identified by their UUID. Accessible only by users with the Super Admin role.';
public static readonly UPDATE_USER_WEB_AGREEMENT_SUMMARY =
'Update user web agreement by user UUID';
public static readonly UPDATE_USER_WEB_AGREEMENT_DESCRIPTION =
@ -515,16 +465,7 @@ export class ControllerRoute {
'This endpoint retrieves the terms and conditions for the application.';
};
};
static WEATHER = class {
public static readonly ROUTE = 'weather';
static ACTIONS = class {
public static readonly FETCH_WEATHER_DETAILS_SUMMARY =
'Fetch Weather Details';
public static readonly FETCH_WEATHER_DETAILS_DESCRIPTION =
'This endpoint retrieves the current weather details for a specified location like temperature, humidity, etc.';
};
};
static PRIVACY_POLICY = class {
public static readonly ROUTE = 'policy';
@ -551,6 +492,7 @@ export class ControllerRoute {
};
static PowerClamp = class {
public static readonly ROUTE = 'power-clamp';
static ACTIONS = class {
public static readonly GET_ENERGY_SUMMARY =
'Get power clamp historical data';
@ -573,20 +515,6 @@ export class ControllerRoute {
'This endpoint retrieves the occupancy heat map data based on the provided parameters.';
};
};
static AQI = class {
public static readonly ROUTE = 'aqi';
static ACTIONS = class {
public static readonly GET_AQI_RANGE_DATA_SUMMARY = 'Get AQI range data';
public static readonly GET_AQI_RANGE_DATA_DESCRIPTION =
'This endpoint retrieves the AQI (Air Quality Index) range data based on the provided parameters.';
public static readonly GET_AQI_DISTRIBUTION_DATA_SUMMARY =
'Get AQI distribution data';
public static readonly GET_AQI_DISTRIBUTION_DATA_DESCRIPTION =
'This endpoint retrieves the AQI (Air Quality Index) distribution data based on the provided parameters.';
};
};
static DEVICE = class {
public static readonly ROUTE = 'devices';
@ -677,11 +605,6 @@ export class ControllerRoute {
'Delete scenes by device uuid and switch name';
public static readonly DELETE_SCENES_BY_SWITCH_NAME_DESCRIPTION =
'This endpoint deletes all scenes associated with a specific switch device.';
public static readonly POPULATE_TUYA_CONST_UUID_SUMMARY =
'Populate Tuya const UUID';
public static readonly POPULATE_TUYA_CONST_UUID_DESCRIPTION =
'This endpoint populates the Tuya const UUID for all devices.';
};
};
static DEVICE_COMMISSION = class {

View File

@ -0,0 +1,3 @@
export const SEND_EMAIL_API_URL_PROD = 'https://send.api.mailtrap.io/api/send/';
export const SEND_EMAIL_API_URL_DEV =
'https://sandbox.api.mailtrap.io/api/send/2634012';
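
These two endpoints are typically switched on the runtime environment. A minimal sketch of that selection, assuming a NODE_ENV check; the helper name, the relative import path, and the 'production' comparison are illustrative assumptions, not part of this diff:

import {
  SEND_EMAIL_API_URL_DEV,
  SEND_EMAIL_API_URL_PROD,
} from './mailtrap-api.constants'; // assumed path of the constants file above

export function resolveSendEmailApiUrl(
  env: string = process.env.NODE_ENV ?? 'development',
): string {
  // Only a real production deployment hits the live send API; everything else
  // goes to the Mailtrap sandbox inbox defined above. (Assumed convention.)
  return env === 'production' ? SEND_EMAIL_API_URL_PROD : SEND_EMAIL_API_URL_DEV;
}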

View File

@ -1,8 +0,0 @@
export enum PollutantType {
AQI = 'aqi',
PM25 = 'pm25',
PM10 = 'pm10',
VOC = 'voc',
CO2 = 'co2',
CH2O = 'ch2o',
}

View File

@ -15,10 +15,8 @@ export enum ProductType {
WL = 'WL',
GD = 'GD',
CUR = 'CUR',
CUR_2 = 'CUR_2',
PC = 'PC',
FOUR_S = '4S',
SIX_S = '6S',
SOS = 'SOS',
AQI = 'AQI',
}

View File

@ -1,33 +1,51 @@
import { Module } from '@nestjs/common';
import { ConfigModule, ConfigService } from '@nestjs/config';
import { TypeOrmModule } from '@nestjs/typeorm';
import { DeviceEntity } from '../modules/device/entities';
import { PermissionTypeEntity } from '../modules/permission/entities';
import { ProductEntity } from '../modules/product/entities';
import { SnakeNamingStrategy } from './strategies';
import { UserEntity } from '../modules/user/entities/user.entity';
import { UserSessionEntity } from '../modules/session/entities/session.entity';
import { UserOtpEntity } from '../modules/user/entities';
import { UserEntity } from '../modules/user/entities/user.entity';
import { SnakeNamingStrategy } from './strategies';
import { ProductEntity } from '../modules/product/entities';
import { DeviceEntity } from '../modules/device/entities';
import { PermissionTypeEntity } from '../modules/permission/entities';
import { TypeOrmWinstonLogger } from '@app/common/logger/services/typeorm.logger';
import { createLogger } from 'winston';
import { winstonLoggerOptions } from '../logger/services/winston.logger';
import { AqiSpaceDailyPollutantStatsEntity } from '../modules/aqi/entities';
import { AutomationEntity } from '../modules/automation/entities';
import { BookableSpaceEntity } from '../modules/booking/entities/bookable-space.entity';
import { BookingEntity } from '../modules/booking/entities/booking.entity';
import { ClientEntity } from '../modules/client/entities';
import { UserSpaceEntity } from '../modules/user/entities';
import { DeviceUserPermissionEntity } from '../modules/device/entities';
import { RoleTypeEntity } from '../modules/role-type/entities';
import { UserNotificationEntity } from '../modules/user/entities';
import { DeviceNotificationEntity } from '../modules/device/entities';
import { RegionEntity } from '../modules/region/entities';
import { TimeZoneEntity } from '../modules/timezone/entities';
import { VisitorPasswordEntity } from '../modules/visitor-password/entities';
import { CommunityEntity } from '../modules/community/entities';
import { DeviceStatusLogEntity } from '../modules/device-status-log/entities';
import { SceneEntity, SceneIconEntity } from '../modules/scene/entities';
import { SceneDeviceEntity } from '../modules/scene-device/entities';
import { ProjectEntity } from '../modules/project/entities';
import {
DeviceNotificationEntity,
DeviceUserPermissionEntity,
} from '../modules/device/entities';
SpaceModelEntity,
SubspaceModelEntity,
TagModel,
SpaceModelProductAllocationEntity,
SubspaceModelProductAllocationEntity,
} from '../modules/space-model/entities';
import {
InviteUserEntity,
InviteUserSpaceEntity,
} from '../modules/Invite-user/entities';
import { SpaceDailyOccupancyDurationEntity } from '../modules/occupancy/entities';
import { InviteSpaceEntity } from '../modules/space/entities/invite-space.entity';
import { AutomationEntity } from '../modules/automation/entities';
import { SpaceProductAllocationEntity } from '../modules/space/entities/space-product-allocation.entity';
import { NewTagEntity } from '../modules/tag/entities/tag.entity';
import { SpaceEntity } from '../modules/space/entities/space.entity';
import { SpaceLinkEntity } from '../modules/space/entities/space-link.entity';
import { SubspaceProductAllocationEntity } from '../modules/space/entities/subspace/subspace-product-allocation.entity';
import { SubspaceEntity } from '../modules/space/entities/subspace/subspace.entity';
import { TagEntity } from '../modules/space/entities/tag.entity';
import { ClientEntity } from '../modules/client/entities';
import { TypeOrmWinstonLogger } from '@app/common/logger/services/typeorm.logger';
import { createLogger } from 'winston';
import { winstonLoggerOptions } from '../logger/services/winston.logger';
import {
PowerClampDailyEntity,
PowerClampHourlyEntity,
@ -37,29 +55,6 @@ import {
PresenceSensorDailyDeviceEntity,
PresenceSensorDailySpaceEntity,
} from '../modules/presence-sensor/entities';
import { ProjectEntity } from '../modules/project/entities';
import { RegionEntity } from '../modules/region/entities';
import { RoleTypeEntity } from '../modules/role-type/entities';
import { SceneDeviceEntity } from '../modules/scene-device/entities';
import { SceneEntity, SceneIconEntity } from '../modules/scene/entities';
import {
SpaceModelEntity,
SpaceModelProductAllocationEntity,
SubspaceModelEntity,
SubspaceModelProductAllocationEntity,
} from '../modules/space-model/entities';
import { InviteSpaceEntity } from '../modules/space/entities/invite-space.entity';
import { SpaceProductAllocationEntity } from '../modules/space/entities/space-product-allocation.entity';
import { SpaceEntity } from '../modules/space/entities/space.entity';
import { SubspaceProductAllocationEntity } from '../modules/space/entities/subspace/subspace-product-allocation.entity';
import { SubspaceEntity } from '../modules/space/entities/subspace/subspace.entity';
import { NewTagEntity } from '../modules/tag/entities/tag.entity';
import { TimeZoneEntity } from '../modules/timezone/entities';
import {
UserNotificationEntity,
UserSpaceEntity,
} from '../modules/user/entities';
import { VisitorPasswordEntity } from '../modules/visitor-password/entities';
@Module({
imports: [
TypeOrmModule.forRootAsync({
@ -88,7 +83,9 @@ import { VisitorPasswordEntity } from '../modules/visitor-password/entities';
PermissionTypeEntity,
CommunityEntity,
SpaceEntity,
SpaceLinkEntity,
SubspaceEntity,
TagEntity,
UserSpaceEntity,
DeviceUserPermissionEntity,
RoleTypeEntity,
@ -103,6 +100,7 @@ import { VisitorPasswordEntity } from '../modules/visitor-password/entities';
SceneDeviceEntity,
SpaceModelEntity,
SubspaceModelEntity,
TagModel,
InviteUserEntity,
InviteUserSpaceEntity,
InviteSpaceEntity,
@ -117,10 +115,6 @@ import { VisitorPasswordEntity } from '../modules/visitor-password/entities';
PowerClampMonthlyEntity,
PresenceSensorDailyDeviceEntity,
PresenceSensorDailySpaceEntity,
AqiSpaceDailyPollutantStatsEntity,
SpaceDailyOccupancyDurationEntity,
BookableSpaceEntity,
BookingEntity,
],
namingStrategy: new SnakeNamingStrategy(),
synchronize: Boolean(JSON.parse(configService.get('DB_SYNC'))),
@ -129,8 +123,8 @@ import { VisitorPasswordEntity } from '../modules/visitor-password/entities';
logger: typeOrmLogger,
extra: {
charset: 'utf8mb4',
max: 100, // set pool max size
idleTimeoutMillis: 3000, // close idle clients after 5 second
max: 20, // set pool max size
idleTimeoutMillis: 5000, // close idle clients after 5 second
connectionTimeoutMillis: 12_000, // return an error after 11 second if connection could not be established
maxUses: 7500, // close (and replace) a connection after it has been used 7500 times (see below for discussion)
},

View File

@ -1,9 +1,10 @@
import { ApiProperty } from '@nestjs/swagger';
import { Transform } from 'class-transformer';
import { IsBoolean, IsOptional } from 'class-validator';
import { BooleanValues } from '../constants/boolean-values.enum';
import { IsBoolean, IsDate, IsOptional } from 'class-validator';
import { IsPageRequestParam } from '../validators/is-page-request-param.validator';
import { ApiProperty } from '@nestjs/swagger';
import { IsSizeRequestParam } from '../validators/is-size-request-param.validator';
import { Transform } from 'class-transformer';
import { parseToDate } from '../util/parseToDate';
import { BooleanValues } from '../constants/boolean-values.enum';
export class PaginationRequestGetListDto {
@ApiProperty({
@ -18,7 +19,6 @@ export class PaginationRequestGetListDto {
return value.obj.includeSpaces === BooleanValues.TRUE;
})
public includeSpaces?: boolean = false;
@IsOptional()
@IsPageRequestParam({
message: 'Page must be bigger than 0',
@ -40,4 +40,40 @@ export class PaginationRequestGetListDto {
description: 'Size request',
})
size?: number;
@IsOptional()
@ApiProperty({
name: 'name',
required: false,
description: 'Name to be filtered',
})
name?: string;
@ApiProperty({
name: 'from',
required: false,
type: Number,
description: `Start time in UNIX timestamp format to filter`,
example: 1674172800000,
})
@IsOptional()
@Transform(({ value }) => parseToDate(value))
@IsDate({
message: `From must be in UNIX timestamp format in order to parse to Date instance`,
})
from?: Date;
@ApiProperty({
name: 'to',
required: false,
type: Number,
description: `End time in UNIX timestamp format to filter`,
example: 1674259200000,
})
@IsOptional()
@Transform(({ value }) => parseToDate(value))
@IsDate({
message: `To must be in UNIX timestamp format in order to parse to Date instance`,
})
to?: Date;
}
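
The DTO above transforms the new 'from'/'to' properties with a parseToDate util that is imported but not shown in this diff. A minimal sketch of what such a helper could look like, assuming the query parameters arrive as UNIX-millisecond values; the handling of non-numeric input is an assumption:

export function parseToDate(value: unknown): Date {
  // Query params arrive as strings; coerce the UNIX-millisecond value to a number.
  const timestamp = Number(value);
  // Non-numeric input yields an Invalid Date (NaN time), which the @IsDate
  // validator on the DTO then rejects with the message defined there.
  return new Date(timestamp);
}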

View File

@ -3,12 +3,26 @@ import { DeviceStatusFirebaseController } from './controllers/devices-status.con
import { DeviceStatusFirebaseService } from './services/devices-status.service';
import { DeviceRepository } from '@app/common/modules/device/repositories';
import { DeviceStatusLogRepository } from '@app/common/modules/device-status-log/repositories/device-status.repository';
import { PowerClampService } from '@app/common/helper/services/power.clamp.service';
import {
PowerClampHourlyRepository,
PowerClampDailyRepository,
PowerClampMonthlyRepository,
} from '@app/common/modules/power-clamp/repositories';
import { SqlLoaderService } from '@app/common/helper/services/sql-loader.service';
import { OccupancyService } from '@app/common/helper/services/occupancy.service';
@Module({
providers: [
DeviceStatusFirebaseService,
DeviceRepository,
DeviceStatusLogRepository,
PowerClampService,
PowerClampHourlyRepository,
PowerClampDailyRepository,
PowerClampMonthlyRepository,
SqlLoaderService,
OccupancyService,
],
controllers: [DeviceStatusFirebaseController],
exports: [DeviceStatusFirebaseService, DeviceStatusLogRepository],

View File

@ -13,7 +13,6 @@ class StatusDto {
@IsNotEmpty()
value: any;
t?: string | number | Date;
}
export class AddDeviceStatusDto {

View File

@ -18,15 +18,20 @@ import {
runTransaction,
} from 'firebase/database';
import { DeviceStatusLogRepository } from '@app/common/modules/device-status-log/repositories';
import { ProductType } from '@app/common/constants/product-type.enum';
import { PowerClampService } from '@app/common/helper/services/power.clamp.service';
import { PowerClampEnergyEnum } from '@app/common/constants/power.clamp.enargy.enum';
import { PresenceSensorEnum } from '@app/common/constants/presence.sensor.enum';
import { OccupancyService } from '@app/common/helper/services/occupancy.service';
@Injectable()
export class DeviceStatusFirebaseService {
private tuya: TuyaContext;
private firebaseDb: Database;
private readonly isDevEnv: boolean;
constructor(
private readonly configService: ConfigService,
private readonly deviceRepository: DeviceRepository,
private readonly powerClampService: PowerClampService,
private readonly occupancyService: OccupancyService,
private deviceStatusLogRepository: DeviceStatusLogRepository,
) {
const accessKey = this.configService.get<string>('auth-config.ACCESS_KEY');
@ -40,8 +45,6 @@ export class DeviceStatusFirebaseService {
// Initialize firebaseDb using firebaseDataBase function
this.firebaseDb = firebaseDataBase(this.configService);
this.isDevEnv =
this.configService.get<string>('NODE_ENV') === 'development';
}
async addDeviceStatusByDeviceUuid(
deviceTuyaUuid: string,
@ -56,7 +59,7 @@ export class DeviceStatusFirebaseService {
const deviceStatusSaved = await this.createDeviceStatusFirebase({
deviceUuid: device.uuid,
deviceTuyaUuid: deviceTuyaUuid,
status: deviceStatus?.status,
status: deviceStatus.status,
productUuid: deviceStatus.productUuid,
productType: deviceStatus.productType,
});
@ -71,94 +74,25 @@ export class DeviceStatusFirebaseService {
);
}
}
async addBatchDeviceStatusToOurDb(
batch: {
deviceTuyaUuid: string;
status: any;
log: any;
device: any;
}[],
): Promise<void> {
const allLogs = [];
console.log(`🔁 Preparing logs from batch of ${batch.length} items...`);
for (const item of batch) {
const device = item.device;
if (!device?.uuid) {
console.log(`⛔ Skipped unknown device: ${item.deviceTuyaUuid}`);
continue;
}
const logs = item.log.properties.map((property) =>
this.deviceStatusLogRepository.create({
deviceId: device.uuid,
deviceTuyaId: item.deviceTuyaUuid,
productId: item.log.productId,
log: item.log,
code: property.code,
value: property.value,
eventId: item.log.dataId,
eventTime: new Date(property.time).toISOString(),
}),
);
allLogs.push(...logs);
}
console.log(`📝 Total logs to insert: ${allLogs.length}`);
const insertLogsPromise = (async () => {
const chunkSize = 300;
let insertedCount = 0;
for (let i = 0; i < allLogs.length; i += chunkSize) {
const chunk = allLogs.slice(i, i + chunkSize);
try {
const result = await this.deviceStatusLogRepository
.createQueryBuilder()
.insert()
.into('device-status-log') // or use DeviceStatusLogEntity
.values(chunk)
.orIgnore() // skip duplicates
.execute();
insertedCount += result.identifiers.length;
console.log(
`✅ Inserted ${result.identifiers.length} / ${chunk.length} logs (chunk)`,
);
} catch (error) {
console.error('❌ Insert error (skipped chunk):', error.message);
}
}
console.log(
`✅ Total logs inserted: ${insertedCount} / ${allLogs.length}`,
);
})();
await insertLogsPromise;
}
async addDeviceStatusToFirebase(
addDeviceStatusDto: AddDeviceStatusDto & { device?: any },
addDeviceStatusDto: AddDeviceStatusDto,
): Promise<AddDeviceStatusDto | null> {
try {
let device = addDeviceStatusDto.device;
if (!device) {
device = await this.getDeviceByDeviceTuyaUuid(
addDeviceStatusDto.deviceTuyaUuid,
);
}
const device = await this.getDeviceByDeviceTuyaUuid(
addDeviceStatusDto.deviceTuyaUuid,
);
if (device?.uuid) {
return await this.createDeviceStatusFirebase({
deviceUuid: device.uuid,
...addDeviceStatusDto,
productType: device.productDevice?.prodType,
productType: device.productDevice.prodType,
});
}
// Return null if device not found or no UUID
return null;
} catch (error) {
// Handle the error silently, perhaps log it internally or ignore it
return null;
}
}
@ -172,15 +106,6 @@ export class DeviceStatusFirebaseService {
relations: ['productDevice'],
});
}
async getAllDevices() {
return await this.deviceRepository.find({
where: {
isActive: true,
},
relations: ['productDevice'],
});
}
async getDevicesInstructionStatus(deviceUuid: string) {
try {
const deviceDetails = await this.getDeviceByDeviceUuid(deviceUuid);
@ -195,7 +120,7 @@ export class DeviceStatusFirebaseService {
return {
productUuid: deviceDetails.productDevice.uuid,
productType: deviceDetails.productDevice.prodType,
status: deviceStatus.result[0]?.status,
status: deviceStatus.result[0].status,
};
} catch (error) {
throw new HttpException(
@ -260,18 +185,18 @@ export class DeviceStatusFirebaseService {
if (!existingData.productType) {
existingData.productType = addDeviceStatusDto.productType;
}
if (!existingData?.status) {
if (!existingData.status) {
existingData.status = [];
}
// Create a map to track existing status codes
const statusMap = new Map(
existingData?.status.map((item) => [item.code, item.value]),
existingData.status.map((item) => [item.code, item.value]),
);
// Update or add status codes
for (const statusItem of addDeviceStatusDto?.status) {
for (const statusItem of addDeviceStatusDto.status) {
statusMap.set(statusItem.code, statusItem.value);
}
@ -284,6 +209,60 @@ export class DeviceStatusFirebaseService {
return existingData;
});
// Save logs to your repository
const newLogs = addDeviceStatusDto.log.properties.map((property) => {
return this.deviceStatusLogRepository.create({
deviceId: addDeviceStatusDto.deviceUuid,
deviceTuyaId: addDeviceStatusDto.deviceTuyaUuid,
productId: addDeviceStatusDto.log.productId,
log: addDeviceStatusDto.log,
code: property.code,
value: property.value,
eventId: addDeviceStatusDto.log.dataId,
eventTime: new Date(property.time).toISOString(),
});
});
await this.deviceStatusLogRepository.save(newLogs);
if (addDeviceStatusDto.productType === ProductType.PC) {
const energyCodes = new Set([
PowerClampEnergyEnum.ENERGY_CONSUMED,
PowerClampEnergyEnum.ENERGY_CONSUMED_A,
PowerClampEnergyEnum.ENERGY_CONSUMED_B,
PowerClampEnergyEnum.ENERGY_CONSUMED_C,
]);
const energyStatus = addDeviceStatusDto?.log?.properties?.find((status) =>
energyCodes.has(status.code),
);
if (energyStatus) {
await this.powerClampService.updateEnergyConsumedHistoricalData(
addDeviceStatusDto.deviceUuid,
);
}
}
if (
addDeviceStatusDto.productType === ProductType.CPS ||
addDeviceStatusDto.productType === ProductType.WPS
) {
const occupancyCodes = new Set([PresenceSensorEnum.PRESENCE_STATE]);
const occupancyStatus = addDeviceStatusDto?.log?.properties?.find(
(status) => occupancyCodes.has(status.code),
);
if (occupancyStatus) {
await this.occupancyService.updateOccupancySensorHistoricalData(
addDeviceStatusDto.deviceUuid,
);
await this.occupancyService.updateOccupancySensorHistoricalDurationData(
addDeviceStatusDto.deviceUuid,
);
}
}
// Return the updated data
const snapshot: DataSnapshot = await get(dataRef);
return snapshot.val();

View File

@ -8,10 +8,7 @@ import { TuyaWebSocketService } from './services/tuya.web.socket.service';
import { OneSignalService } from './services/onesignal.service';
import { DeviceMessagesService } from './services/device.messages.service';
import { DeviceRepositoryModule } from '../modules/device/device.repository.module';
import {
DeviceNotificationRepository,
DeviceRepository,
} from '../modules/device/repositories';
import { DeviceNotificationRepository } from '../modules/device/repositories';
import { DeviceStatusFirebaseModule } from '../firebase/devices-status/devices-status.module';
import { CommunityPermissionService } from './services/community.permission.service';
import { CommunityRepository } from '../modules/community/repositories';
@ -30,7 +27,6 @@ import { SosHandlerService } from './services/sos.handler.service';
DeviceNotificationRepository,
CommunityRepository,
SosHandlerService,
DeviceRepository,
],
exports: [
HelperHashService,

View File

@ -1,66 +0,0 @@
import { Injectable } from '@nestjs/common';
import { DataSource } from 'typeorm';
import { SQL_PROCEDURES_PATH } from '@app/common/constants/sql-query-path';
import { SqlLoaderService } from './sql-loader.service';
@Injectable()
export class AqiDataService {
constructor(
private readonly sqlLoader: SqlLoaderService,
private readonly dataSource: DataSource,
) {}
async updateAQISensorHistoricalData(): Promise<void> {
try {
const { dateStr } = this.getFormattedDates();
// Execute all procedures in parallel
await Promise.all([
this.executeProcedureWithRetry(
'proceduce_update_daily_space_aqi',
[dateStr],
'fact_daily_space_aqi',
),
]);
} catch (err) {
console.error('Failed to update AQI sensor historical data:', err);
throw err;
}
}
private getFormattedDates(): { dateStr: string } {
const now = new Date();
return {
dateStr: now.toLocaleDateString('en-CA'), // YYYY-MM-DD
};
}
private async executeProcedureWithRetry(
procedureFileName: string,
params: (string | number | null)[],
folderName: string,
retries = 3,
): Promise<void> {
try {
const query = this.loadQuery(folderName, procedureFileName);
await this.dataSource.query(query, params);
console.log(`Procedure ${procedureFileName} executed successfully.`);
} catch (err) {
if (retries > 0) {
const delayMs = 1000 * (4 - retries); // Exponential backoff
console.warn(`Retrying ${procedureFileName} (${retries} retries left)`);
await new Promise((resolve) => setTimeout(resolve, delayMs));
return this.executeProcedureWithRetry(
procedureFileName,
params,
folderName,
retries - 1,
);
}
console.error(`Failed to execute ${procedureFileName}:`, err);
throw err;
}
}
private loadQuery(folderName: string, fileName: string): string {
return this.sqlLoader.loadQuery(folderName, fileName, SQL_PROCEDURES_PATH);
}
}

View File

@ -1,69 +1,66 @@
import { DeviceRepository } from '@app/common/modules/device/repositories';
import { Injectable } from '@nestjs/common';
import { SqlLoaderService } from './sql-loader.service';
import { DataSource } from 'typeorm';
import { SQL_PROCEDURES_PATH } from '@app/common/constants/sql-query-path';
import { SqlLoaderService } from './sql-loader.service';
@Injectable()
export class OccupancyService {
constructor(
private readonly sqlLoader: SqlLoaderService,
private readonly dataSource: DataSource,
private readonly deviceRepository: DeviceRepository,
) {}
async updateOccupancyDataProcedures(): Promise<void> {
try {
const { dateStr } = this.getFormattedDates();
// Execute all procedures in parallel
await Promise.all([
this.executeProcedureWithRetry(
'procedure_update_fact_space_occupancy',
[dateStr],
'fact_space_occupancy_count',
),
this.executeProcedureWithRetry(
'procedure_update_daily_space_occupancy_duration',
[dateStr],
'fact_daily_space_occupancy_duration',
),
]);
} catch (err) {
console.error('Failed to update occupancy data:', err);
throw err;
}
}
private getFormattedDates(): { dateStr: string } {
const now = new Date();
return {
dateStr: now.toLocaleDateString('en-CA'), // YYYY-MM-DD
};
}
private async executeProcedureWithRetry(
procedureFileName: string,
params: (string | number | null)[],
folderName: string,
retries = 3,
async updateOccupancySensorHistoricalDurationData(
deviceUuid: string,
): Promise<void> {
try {
const query = this.loadQuery(folderName, procedureFileName);
await this.dataSource.query(query, params);
console.log(`Procedure ${procedureFileName} executed successfully.`);
const now = new Date();
const dateStr = now.toLocaleDateString('en-CA'); // YYYY-MM-DD
const device = await this.deviceRepository.findOne({
where: { uuid: deviceUuid },
relations: ['spaceDevice'],
});
await this.executeProcedure(
'fact_daily_space_occupancy_duration',
'procedure_update_daily_space_occupancy_duration',
[dateStr, device.spaceDevice?.uuid],
);
} catch (err) {
if (retries > 0) {
const delayMs = 1000 * (4 - retries); // Exponential backoff
console.warn(`Retrying ${procedureFileName} (${retries} retries left)`);
await new Promise((resolve) => setTimeout(resolve, delayMs));
return this.executeProcedureWithRetry(
procedureFileName,
params,
folderName,
retries - 1,
);
}
console.error(`Failed to execute ${procedureFileName}:`, err);
console.error('Failed to insert or update occupancy duration data:', err);
throw err;
}
}
async updateOccupancySensorHistoricalData(deviceUuid: string): Promise<void> {
try {
const now = new Date();
const dateStr = now.toLocaleDateString('en-CA'); // YYYY-MM-DD
const device = await this.deviceRepository.findOne({
where: { uuid: deviceUuid },
relations: ['spaceDevice'],
});
await this.executeProcedure(
'fact_space_occupancy_count',
'procedure_update_fact_space_occupancy',
[dateStr, device.spaceDevice?.uuid],
);
} catch (err) {
console.error('Failed to insert or update occupancy data:', err);
throw err;
}
}
private async executeProcedure(
procedureFolderName: string,
procedureFileName: string,
params: (string | number | null)[],
): Promise<void> {
const query = this.loadQuery(procedureFolderName, procedureFileName);
await this.dataSource.query(query, params);
console.log(`Procedure ${procedureFileName} executed successfully.`);
}
private loadQuery(folderName: string, fileName: string): string {
return this.sqlLoader.loadQuery(folderName, fileName, SQL_PROCEDURES_PATH);

View File

@ -1,7 +1,7 @@
import { Injectable } from '@nestjs/common';
import { SqlLoaderService } from './sql-loader.service';
import { DataSource } from 'typeorm';
import { SQL_PROCEDURES_PATH } from '@app/common/constants/sql-query-path';
import { SqlLoaderService } from './sql-loader.service';
@Injectable()
export class PowerClampService {
@ -10,74 +10,50 @@ export class PowerClampService {
private readonly dataSource: DataSource,
) {}
async updateEnergyConsumedHistoricalData(): Promise<void> {
async updateEnergyConsumedHistoricalData(deviceUuid: string): Promise<void> {
try {
const { dateStr, monthYear } = this.getFormattedDates();
// Execute all procedures in parallel
await Promise.all([
this.executeProcedureWithRetry(
'fact_hourly_device_energy_consumed_procedure',
[dateStr],
'fact_device_energy_consumed',
),
this.executeProcedureWithRetry(
'fact_daily_device_energy_consumed_procedure',
[dateStr],
'fact_device_energy_consumed',
),
this.executeProcedureWithRetry(
'fact_monthly_device_energy_consumed_procedure',
[monthYear],
'fact_device_energy_consumed',
),
]);
} catch (err) {
console.error('Failed to update energy consumption data:', err);
throw err;
}
}
private getFormattedDates(): { dateStr: string; monthYear: string } {
const now = new Date();
return {
dateStr: now.toLocaleDateString('en-CA'), // YYYY-MM-DD
monthYear: now
const now = new Date();
const dateStr = now.toLocaleDateString('en-CA'); // YYYY-MM-DD
const hour = now.getHours();
const monthYear = now
.toLocaleDateString('en-US', {
month: '2-digit',
year: 'numeric',
})
.replace('/', '-'), // MM-YYYY
};
}
.replace('/', '-'); // MM-YYYY
private async executeProcedureWithRetry(
procedureFileName: string,
params: (string | number | null)[],
folderName: string,
retries = 3,
): Promise<void> {
try {
const query = this.loadQuery(folderName, procedureFileName);
await this.dataSource.query(query, params);
console.log(`Procedure ${procedureFileName} executed successfully.`);
await this.executeProcedure(
'fact_hourly_device_energy_consumed_procedure',
[deviceUuid, dateStr, hour],
);
await this.executeProcedure(
'fact_daily_device_energy_consumed_procedure',
[deviceUuid, dateStr],
);
await this.executeProcedure(
'fact_monthly_device_energy_consumed_procedure',
[deviceUuid, monthYear],
);
} catch (err) {
if (retries > 0) {
const delayMs = 1000 * (4 - retries); // Exponential backoff
console.warn(`Retrying ${procedureFileName} (${retries} retries left)`);
await new Promise((resolve) => setTimeout(resolve, delayMs));
return this.executeProcedureWithRetry(
procedureFileName,
params,
folderName,
retries - 1,
);
}
console.error(`Failed to execute ${procedureFileName}:`, err);
console.error('Failed to insert or update energy data:', err);
throw err;
}
}
private async executeProcedure(
procedureFileName: string,
params: (string | number | null)[],
): Promise<void> {
const query = this.loadQuery(
'fact_device_energy_consumed',
procedureFileName,
);
await this.dataSource.query(query, params);
console.log(`Procedure ${procedureFileName} executed successfully.`);
}
private loadQuery(folderName: string, fileName: string): string {
return this.sqlLoader.loadQuery(folderName, fileName, SQL_PROCEDURES_PATH);
}
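
loadQuery delegates to SqlLoaderService, which is not part of this diff. A minimal sketch of such a loader, assuming it resolves '<basePath>/<folderName>/<fileName>.sql' and reads the file synchronously; the file layout and lack of caching are assumptions:

import { readFileSync } from 'fs';
import { join } from 'path';
import { Injectable } from '@nestjs/common';

@Injectable()
export class SqlLoaderService {
  loadQuery(folderName: string, fileName: string, basePath: string): string {
    // e.g. <basePath>/fact_device_energy_consumed/
    //        fact_daily_device_energy_consumed_procedure.sql  (assumed layout)
    const filePath = join(basePath, folderName, `${fileName}.sql`);
    return readFileSync(filePath, 'utf8');
  }
}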

View File

@ -16,46 +16,21 @@ export class SosHandlerService {
);
}
async handleSosEventFirebase(device: any, logData: any): Promise<void> {
const sosTrueStatus = [{ code: 'sos', value: true }];
const sosFalseStatus = [{ code: 'sos', value: false }];
async handleSosEvent(devId: string, logData: any): Promise<void> {
try {
// ✅ Send true status
await this.deviceStatusFirebaseService.addDeviceStatusToFirebase({
deviceTuyaUuid: device.deviceTuyaUuid,
status: sosTrueStatus,
deviceTuyaUuid: devId,
status: [{ code: 'sos', value: true }],
log: logData,
device,
});
await this.deviceStatusFirebaseService.addBatchDeviceStatusToOurDb([
{
deviceTuyaUuid: device.deviceTuyaUuid,
status: sosTrueStatus,
log: logData,
device,
},
]);
// ✅ Schedule false status
setTimeout(async () => {
try {
await this.deviceStatusFirebaseService.addDeviceStatusToFirebase({
deviceTuyaUuid: device.deviceTuyaUuid,
status: sosFalseStatus,
deviceTuyaUuid: devId,
status: [{ code: 'sos', value: false }],
log: logData,
device,
});
await this.deviceStatusFirebaseService.addBatchDeviceStatusToOurDb([
{
deviceTuyaUuid: device.deviceTuyaUuid,
status: sosFalseStatus,
log: logData,
device,
},
]);
} catch (err) {
this.logger.error('Failed to send SOS false value', err);
}

View File

@ -1,24 +1,13 @@
import { Injectable, OnModuleInit } from '@nestjs/common';
import { Injectable } from '@nestjs/common';
import TuyaWebsocket from '../../config/tuya-web-socket-config';
import { ConfigService } from '@nestjs/config';
import { DeviceStatusFirebaseService } from '@app/common/firebase/devices-status/services/devices-status.service';
import { SosHandlerService } from './sos.handler.service';
import * as NodeCache from 'node-cache';
@Injectable()
export class TuyaWebSocketService implements OnModuleInit {
export class TuyaWebSocketService {
private client: any;
private readonly isDevEnv: boolean;
private readonly deviceCache = new NodeCache({ stdTTL: 7200 }); // TTL = 2 hour
private messageQueue: {
devId: string;
status: any;
logData: any;
device: any;
}[] = [];
private isProcessing = false;
constructor(
private readonly configService: ConfigService,
@ -37,36 +26,16 @@ export class TuyaWebSocketService implements OnModuleInit {
});
if (this.configService.get<string>('tuya-config.TRUN_ON_TUYA_SOCKET')) {
// Set up event handlers
this.setupEventHandlers();
// Start receiving messages
this.client.start();
}
// Run the queue processor every 15 seconds
setInterval(() => this.processQueue(), 15000);
// Refresh the cache every 1 hour
setInterval(() => this.initializeDeviceCache(), 30 * 60 * 1000); // 30 minutes
}
async onModuleInit() {
await this.initializeDeviceCache();
}
private async initializeDeviceCache() {
try {
const allDevices = await this.deviceStatusFirebaseService.getAllDevices();
allDevices.forEach((device) => {
if (device.deviceTuyaUuid) {
this.deviceCache.set(device.deviceTuyaUuid, device);
}
});
console.log(`✅ Refreshed cache with ${allDevices.length} devices.`);
} catch (error) {
console.error('❌ Failed to initialize device cache:', error);
}
}
private setupEventHandlers() {
// Event handlers
this.client.open(() => {
console.log('open');
});
@ -74,38 +43,23 @@ export class TuyaWebSocketService implements OnModuleInit {
this.client.message(async (ws: WebSocket, message: any) => {
try {
const { devId, status, logData } = this.extractMessageData(message);
if (!Array.isArray(logData?.properties)) {
this.client.ackMessage(message.messageId);
return;
}
const device = this.deviceCache.get(devId);
if (!device) {
// console.log(`⛔ Unknown device: ${devId}, message ignored.`);
this.client.ackMessage(message.messageId);
return;
}
if (this.sosHandlerService.isSosTriggered(status)) {
await this.sosHandlerService.handleSosEventFirebase(devId, logData);
await this.sosHandlerService.handleSosEvent(devId, logData);
} else {
await this.deviceStatusFirebaseService.addDeviceStatusToFirebase({
deviceTuyaUuid: devId,
status,
status: status,
log: logData,
device,
});
}
// Push to internal queue
this.messageQueue.push({ devId, status, logData, device });
// Acknowledge the message
this.client.ackMessage(message.messageId);
} catch (error) {
console.error('Error receiving message:', error);
console.error('Error processing message:', error);
}
});
this.client.reconnect(() => {
console.log('reconnect');
});
@ -126,37 +80,6 @@ export class TuyaWebSocketService implements OnModuleInit {
console.error('WebSocket error:', error);
});
}
private async processQueue() {
if (this.isProcessing) {
console.log('⏳ Skipping: still processing previous batch');
return;
}
if (this.messageQueue.length === 0) return;
this.isProcessing = true;
const batch = [...this.messageQueue];
this.messageQueue = [];
console.log(`🔁 Processing batch of size: ${batch.length}`);
try {
await this.deviceStatusFirebaseService.addBatchDeviceStatusToOurDb(
batch.map((item) => ({
deviceTuyaUuid: item.devId,
status: item.status,
log: item.logData,
device: item.device,
})),
);
} catch (error) {
console.error('❌ Error processing batch:', error);
this.messageQueue.unshift(...batch); // retry
} finally {
this.isProcessing = false;
}
}
private extractMessageData(message: any): {
devId: string;
status: any;

View File

@ -1,5 +0,0 @@
// Convert time string (HH:mm) to minutes
export function timeToMinutes(time: string): number {
const [hours, minutes] = time.split(':').map(Number);
return hours * 60 + minutes;
}

View File

@ -49,12 +49,12 @@ export class TuyaService {
path,
});
// if (!response.success) {
// throw new HttpException(
// `Error fetching device details: ${response.msg}`,
// HttpStatus.BAD_REQUEST,
// );
// }
if (!response.success) {
throw new HttpException(
`Error fetching device details: ${response.msg}`,
HttpStatus.BAD_REQUEST,
);
}
return response.result;
}

View File

@ -1,26 +1,43 @@
import { utilities as nestWinstonModuleUtilities } from 'nest-winston';
import * as winston from 'winston';
const environment = process.env.NODE_ENV || 'local';
export const winstonLoggerOptions: winston.LoggerOptions = {
level:
process.env.AZURE_POSTGRESQL_DATABASE === 'development' ? 'debug' : 'error',
environment === 'local'
? 'debug'
: environment === 'development'
? 'warn'
: 'error',
transports: [
new winston.transports.Console({
level:
environment === 'local'
? 'debug'
: environment === 'development'
? 'warn'
: 'error',
format: winston.format.combine(
winston.format.timestamp(),
nestWinstonModuleUtilities.format.nestLike('MyApp', {
prettyPrint: true,
prettyPrint: environment === 'local',
}),
),
}),
new winston.transports.File({
filename: 'logs/error.log',
level: 'error',
format: winston.format.json(),
}),
new winston.transports.File({
filename: 'logs/combined.log',
format: winston.format.json(),
}),
// Only create file logs if NOT local
...(environment !== 'local'
? [
new winston.transports.File({
filename: 'logs/error.log',
level: 'error',
format: winston.format.json(),
}),
new winston.transports.File({
filename: 'logs/combined.log',
level: 'info',
format: winston.format.json(),
}),
]
: []),
],
};
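
A common way to wire these options into the NestJS bootstrap, shown here as a sketch; the AppModule import path, the main-file location, and the port are assumptions, and only winstonLoggerOptions comes from the file above:

import { NestFactory } from '@nestjs/core';
import { WinstonModule } from 'nest-winston';
import { AppModule } from './app.module'; // assumed path
import { winstonLoggerOptions } from './logger/services/winston.logger'; // assumed path

async function bootstrap(): Promise<void> {
  // Replace Nest's default logger with the Winston configuration above.
  const app = await NestFactory.create(AppModule, {
    logger: WinstonModule.createLogger(winstonLoggerOptions),
  });
  await app.listen(3000); // port is an assumption
}
void bootstrap();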

View File

@ -1,6 +1,6 @@
import { RoleType } from '@app/common/constants/role.type.enum';
import { UserStatusEnum } from '@app/common/constants/user-status.enum';
import { IsEnum, IsNotEmpty, IsOptional, IsString } from 'class-validator';
import { IsEnum, IsNotEmpty, IsString } from 'class-validator';
export class InviteUserDto {
@IsString()
@ -12,12 +12,8 @@ export class InviteUserDto {
public email: string;
@IsString()
@IsOptional()
public jobTitle?: string;
@IsString()
@IsOptional()
public companyName?: string;
@IsNotEmpty()
public jobTitle: string;
@IsEnum(UserStatusEnum)
@IsNotEmpty()

View File

@ -8,14 +8,14 @@ import {
Unique,
} from 'typeorm';
import { RoleType } from '@app/common/constants/role.type.enum';
import { UserStatusEnum } from '@app/common/constants/user-status.enum';
import { AbstractEntity } from '../../abstract/entities/abstract.entity';
import { ProjectEntity } from '../../project/entities';
import { RoleTypeEntity } from '../../role-type/entities';
import { SpaceEntity } from '../../space/entities/space.entity';
import { UserStatusEnum } from '@app/common/constants/user-status.enum';
import { UserEntity } from '../../user/entities';
import { RoleType } from '@app/common/constants/role.type.enum';
import { InviteUserDto, InviteUserSpaceDto } from '../dtos';
import { ProjectEntity } from '../../project/entities';
import { SpaceEntity } from '../../space/entities/space.entity';
@Entity({ name: 'invite-user' })
@Unique(['email', 'project'])
@ -37,11 +37,6 @@ export class InviteUserEntity extends AbstractEntity<InviteUserDto> {
})
jobTitle: string;
@Column({
nullable: true,
})
companyName: string;
@Column({
nullable: false,
enum: Object.values(UserStatusEnum),
@ -87,10 +82,7 @@ export class InviteUserEntity extends AbstractEntity<InviteUserDto> {
onDelete: 'CASCADE',
})
public roleType: RoleTypeEntity;
@OneToOne(() => UserEntity, (user) => user.inviteUser, {
nullable: true,
onDelete: 'CASCADE',
})
@OneToOne(() => UserEntity, (user) => user.inviteUser, { nullable: true })
@JoinColumn({ name: 'user_uuid' })
user: UserEntity;
@OneToMany(
@ -120,9 +112,7 @@ export class InviteUserSpaceEntity extends AbstractEntity<InviteUserSpaceDto> {
})
public uuid: string;
@ManyToOne(() => InviteUserEntity, (inviteUser) => inviteUser.spaces, {
onDelete: 'CASCADE',
})
@ManyToOne(() => InviteUserEntity, (inviteUser) => inviteUser.spaces)
@JoinColumn({ name: 'invite_user_uuid' })
public inviteUser: InviteUserEntity;

View File

@ -1,11 +0,0 @@
import { Module } from '@nestjs/common';
import { TypeOrmModule } from '@nestjs/typeorm';
import { AqiSpaceDailyPollutantStatsEntity } from './entities/aqi.entity';
@Module({
providers: [],
exports: [],
controllers: [],
imports: [TypeOrmModule.forFeature([AqiSpaceDailyPollutantStatsEntity])],
})
export class AqiRepositoryModule {}

View File

@ -1,82 +0,0 @@
import { IsNotEmpty, IsNumber, IsString } from 'class-validator';
export class AqiSpaceDailyPollutantStatsDto {
@IsString()
@IsNotEmpty()
public uuid: string;
@IsNotEmpty()
@IsString()
spaceUuid: string;
@IsNotEmpty()
@IsString()
eventDay: string;
@IsNotEmpty()
@IsNumber()
eventHour: number;
@IsNumber()
pm1Min: number;
@IsNumber()
pm1Avg: number;
@IsNumber()
pm1Max: number;
@IsNumber()
pm10Min: number;
@IsNumber()
pm10Avg: number;
@IsNumber()
pm10Max: number;
@IsNumber()
pm25Min: number;
@IsNumber()
pm25Avg: number;
@IsNumber()
pm25Max: number;
@IsNumber()
ch2oMin: number;
@IsNumber()
ch2oAvg: number;
@IsNumber()
ch2oMax: number;
@IsNumber()
vocMin: number;
@IsNumber()
vocAvg: number;
@IsNumber()
vocMax: number;
@IsNumber()
co2Min: number;
@IsNumber()
co2Avg: number;
@IsNumber()
co2Max: number;
@IsNumber()
aqiMin: number;
@IsNumber()
aqiAvg: number;
@IsNumber()
aqiMax: number;
}

View File

@ -1 +0,0 @@
export * from './aqi.dto';

View File

@ -1,184 +0,0 @@
import { Column, Entity, ManyToOne, Unique } from 'typeorm';
import { AbstractEntity } from '../../abstract/entities/abstract.entity';
import { SpaceEntity } from '../../space/entities/space.entity';
import { AqiSpaceDailyPollutantStatsDto } from '../dtos';
@Entity({ name: 'space-daily-pollutant-stats' })
@Unique(['spaceUuid', 'eventDate'])
export class AqiSpaceDailyPollutantStatsEntity extends AbstractEntity<AqiSpaceDailyPollutantStatsDto> {
@Column({ nullable: false })
public spaceUuid: string;
@ManyToOne(() => SpaceEntity, (space) => space.aqiSensorDaily)
space: SpaceEntity;
@Column({ type: 'date', nullable: false })
public eventDate: Date;
@Column('float', { nullable: true })
public goodAqiPercentage?: number;
@Column('float', { nullable: true })
public moderateAqiPercentage?: number;
@Column('float', { nullable: true })
public unhealthySensitiveAqiPercentage?: number;
@Column('float', { nullable: true })
public unhealthyAqiPercentage?: number;
@Column('float', { nullable: true })
public veryUnhealthyAqiPercentage?: number;
@Column('float', { nullable: true })
public hazardousAqiPercentage?: number;
@Column('float', { nullable: true })
public dailyAvgAqi?: number;
@Column('float', { nullable: true })
public dailyMaxAqi?: number;
@Column('float', { nullable: true })
public dailyMinAqi?: number;
@Column('float', { nullable: true })
public goodPm25Percentage?: number;
@Column('float', { nullable: true })
public moderatePm25Percentage?: number;
@Column('float', { nullable: true })
public unhealthySensitivePm25Percentage?: number;
@Column('float', { nullable: true })
public unhealthyPm25Percentage?: number;
@Column('float', { nullable: true })
public veryUnhealthyPm25Percentage?: number;
@Column('float', { nullable: true })
public hazardousPm25Percentage?: number;
@Column('float', { nullable: true })
public dailyAvgPm25?: number;
@Column('float', { nullable: true })
public dailyMaxPm25?: number;
@Column('float', { nullable: true })
public dailyMinPm25?: number;
@Column('float', { nullable: true })
public goodPm10Percentage?: number;
@Column('float', { nullable: true })
public moderatePm10Percentage?: number;
@Column('float', { nullable: true })
public unhealthySensitivePm10Percentage?: number;
@Column('float', { nullable: true })
public unhealthyPm10Percentage?: number;
@Column('float', { nullable: true })
public veryUnhealthyPm10Percentage?: number;
@Column('float', { nullable: true })
public hazardousPm10Percentage?: number;
@Column('float', { nullable: true })
public dailyAvgPm10?: number;
@Column('float', { nullable: true })
public dailyMaxPm10?: number;
@Column('float', { nullable: true })
public dailyMinPm10?: number;
@Column('float', { nullable: true })
public goodVocPercentage?: number;
@Column('float', { nullable: true })
public moderateVocPercentage?: number;
@Column('float', { nullable: true })
public unhealthySensitiveVocPercentage?: number;
@Column('float', { nullable: true })
public unhealthyVocPercentage?: number;
@Column('float', { nullable: true })
public veryUnhealthyVocPercentage?: number;
@Column('float', { nullable: true })
public hazardousVocPercentage?: number;
@Column('float', { nullable: true })
public dailyAvgVoc?: number;
@Column('float', { nullable: true })
public dailyMaxVoc?: number;
@Column('float', { nullable: true })
public dailyMinVoc?: number;
@Column('float', { nullable: true })
public goodCo2Percentage?: number;
@Column('float', { nullable: true })
public moderateCo2Percentage?: number;
@Column('float', { nullable: true })
public unhealthySensitiveCo2Percentage?: number;
@Column('float', { nullable: true })
public unhealthyCo2Percentage?: number;
@Column('float', { nullable: true })
public veryUnhealthyCo2Percentage?: number;
@Column('float', { nullable: true })
public hazardousCo2Percentage?: number;
@Column('float', { nullable: true })
public dailyAvgCo2?: number;
@Column('float', { nullable: true })
public dailyMaxCo2?: number;
@Column('float', { nullable: true })
public dailyMinCo2?: number;
@Column('float', { nullable: true })
public goodCh2oPercentage?: number;
@Column('float', { nullable: true })
public moderateCh2oPercentage?: number;
@Column('float', { nullable: true })
public unhealthySensitiveCh2oPercentage?: number;
@Column('float', { nullable: true })
public unhealthyCh2oPercentage?: number;
@Column('float', { nullable: true })
public veryUnhealthyCh2oPercentage?: number;
@Column('float', { nullable: true })
public hazardousCh2oPercentage?: number;
@Column('float', { nullable: true })
public dailyAvgCh2o?: number;
@Column('float', { nullable: true })
public dailyMaxCh2o?: number;
@Column('float', { nullable: true })
public dailyMinCh2o?: number;
constructor(partial: Partial<AqiSpaceDailyPollutantStatsEntity>) {
super();
Object.assign(this, partial);
}
}

View File

@ -1 +0,0 @@
export * from './aqi.entity';

View File

@ -1,10 +0,0 @@
import { DataSource, Repository } from 'typeorm';
import { Injectable } from '@nestjs/common';
import { AqiSpaceDailyPollutantStatsEntity } from '../entities';
@Injectable()
export class AqiSpaceDailyPollutantStatsRepository extends Repository<AqiSpaceDailyPollutantStatsEntity> {
constructor(private dataSource: DataSource) {
super(AqiSpaceDailyPollutantStatsEntity, dataSource.createEntityManager());
}
}
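Because this repository builds on an injected DataSource rather than @InjectRepository, it only needs to be registered as a provider to be injectable. A hedged wiring sketch follows; the module, the service, and the spaceUuid/eventDate columns used in the query are assumptions, not taken from the commit.

import { Injectable, Module } from '@nestjs/common';
import { AqiSpaceDailyPollutantStatsRepository } from './repositories';

@Injectable()
export class AqiQueryService {
  constructor(
    private readonly aqiRepository: AqiSpaceDailyPollutantStatsRepository,
  ) {}

  latestForSpace(spaceUuid: string) {
    // The full Repository<T> API is inherited from TypeORM.
    return this.aqiRepository.find({
      where: { spaceUuid },
      order: { eventDate: 'DESC' },
      take: 30,
    });
  }
}

@Module({
  providers: [AqiSpaceDailyPollutantStatsRepository, AqiQueryService],
  exports: [AqiQueryService],
})
export class AqiQueryModule {}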

View File

@ -1 +0,0 @@
export * from './aqi.repository';

View File

@ -1,12 +0,0 @@
import { Module } from '@nestjs/common';
import { TypeOrmModule } from '@nestjs/typeorm';
import { BookableSpaceEntity } from './entities/bookable-space.entity';
import { BookingEntity } from './entities/booking.entity';
@Module({
providers: [],
exports: [],
controllers: [],
imports: [TypeOrmModule.forFeature([BookableSpaceEntity, BookingEntity])],
})
export class BookingRepositoryModule {}

View File

@ -1,51 +0,0 @@
import { DaysEnum } from '@app/common/constants/days.enum';
import {
Column,
CreateDateColumn,
Entity,
JoinColumn,
OneToOne,
UpdateDateColumn,
} from 'typeorm';
import { AbstractEntity } from '../../abstract/entities/abstract.entity';
import { SpaceEntity } from '../../space/entities/space.entity';
@Entity('bookable-space')
export class BookableSpaceEntity extends AbstractEntity {
@Column({
type: 'uuid',
default: () => 'gen_random_uuid()',
nullable: false,
})
public uuid: string;
@OneToOne(() => SpaceEntity, (space) => space.bookableConfig)
@JoinColumn({ name: 'space_uuid' })
space: SpaceEntity;
@Column({
type: 'enum',
enum: DaysEnum,
array: true,
nullable: false,
})
daysAvailable: DaysEnum[];
@Column({ type: 'time' })
startTime: string;
@Column({ type: 'time' })
endTime: string;
@Column({ type: Boolean, default: true })
active: boolean;
@Column({ type: 'int', default: null })
points?: number;
@CreateDateColumn()
createdAt: Date;
@UpdateDateColumn()
updatedAt: Date;
}
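For reference, a sketch of how the removed bookable-space configuration could be consumed to answer an availability question; the helper name is hypothetical, and the 'HH:mm:ss' string format assumes how TypeORM returns Postgres time columns.

import { DaysEnum } from '@app/common/constants/days.enum';
import { BookableSpaceEntity } from './entities/bookable-space.entity';

// Returns true when the requested slot fits entirely inside the configured window.
export function isSlotBookable(
  config: BookableSpaceEntity,
  day: DaysEnum,
  requestedStart: string, // e.g. '09:00:00'
  requestedEnd: string, // e.g. '10:30:00'
): boolean {
  if (!config.active || !config.daysAvailable.includes(day)) {
    return false;
  }
  // Lexicographic comparison is safe for zero-padded 'HH:mm:ss' strings.
  return requestedStart >= config.startTime && requestedEnd <= config.endTime;
}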

View File

@ -1,44 +0,0 @@
import {
Column,
CreateDateColumn,
Entity,
ManyToOne,
UpdateDateColumn,
} from 'typeorm';
import { AbstractEntity } from '../../abstract/entities/abstract.entity';
import { SpaceEntity } from '../../space/entities/space.entity';
import { UserEntity } from '../../user/entities';
@Entity('booking')
export class BookingEntity extends AbstractEntity {
@Column({
type: 'uuid',
default: () => 'gen_random_uuid()',
nullable: false,
})
public uuid: string;
@ManyToOne(() => SpaceEntity, (space) => space.bookableConfig)
space: SpaceEntity;
@ManyToOne(() => UserEntity, (user) => user.bookings)
user: UserEntity;
@Column({ type: Date, nullable: false })
date: Date;
@Column({ type: 'time' })
startTime: string;
@Column({ type: 'time' })
endTime: string;
@Column({ type: 'int', default: null })
cost?: number;
@CreateDateColumn()
createdAt: Date;
@UpdateDateColumn()
updatedAt: Date;
}

View File

@ -1,10 +0,0 @@
import { DataSource, Repository } from 'typeorm';
import { Injectable } from '@nestjs/common';
import { BookableSpaceEntity } from '../entities/bookable-space.entity';
@Injectable()
export class BookableSpaceEntityRepository extends Repository<BookableSpaceEntity> {
constructor(private dataSource: DataSource) {
super(BookableSpaceEntity, dataSource.createEntityManager());
}
}

View File

@ -1,10 +0,0 @@
import { Injectable } from '@nestjs/common';
import { DataSource, Repository } from 'typeorm';
import { BookingEntity } from '../entities/booking.entity';
@Injectable()
export class BookingEntityRepository extends Repository<BookingEntity> {
constructor(private dataSource: DataSource) {
super(BookingEntity, dataSource.createEntityManager());
}
}
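A sketch of how the removed booking entity and repository would typically be used together to persist a reservation; the writer service, import paths, and argument formats are illustrative assumptions.

import { Injectable } from '@nestjs/common';
import { BookingEntity } from '../entities/booking.entity';
import { BookingEntityRepository } from './booking.repository'; // path assumed
import { SpaceEntity } from '../../space/entities/space.entity';
import { UserEntity } from '../../user/entities';

@Injectable()
export class BookingWriter {
  constructor(private readonly bookingRepository: BookingEntityRepository) {}

  createBooking(
    space: SpaceEntity,
    user: UserEntity,
    date: Date,
    startTime: string, // 'HH:mm:ss'
    endTime: string,
  ): Promise<BookingEntity> {
    // create() maps the relation objects to the space/user foreign keys;
    // cost is left unset and stays NULL, matching its column default.
    const booking = this.bookingRepository.create({
      space,
      user,
      date,
      startTime,
      endTime,
    });
    return this.bookingRepository.save(booking);
  }
}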

View File

@ -2,15 +2,15 @@ import { SourceType } from '@app/common/constants/source-type.enum';
import { Entity, Column, PrimaryColumn, Unique } from 'typeorm';
@Entity('device-status-log')
@Unique('event_time_idx', ['eventTime', 'deviceId', 'code', 'value'])
@Unique('event_time_idx', ['eventTime'])
export class DeviceStatusLogEntity {
@PrimaryColumn({ type: 'int', generated: true, unsigned: true })
@Column({ type: 'int', generated: true, unsigned: true })
id: number;
@Column({ type: 'text' })
eventId: string;
@Column({ type: 'timestamptz' })
@PrimaryColumn({ type: 'timestamptz' })
eventTime: Date;
@Column({

View File

@ -1,24 +1,24 @@
import {
Column,
Entity,
Index,
JoinColumn,
ManyToOne,
OneToMany,
Unique,
Index,
JoinColumn,
} from 'typeorm';
import { AbstractEntity } from '../../abstract/entities/abstract.entity';
import { PermissionTypeEntity } from '../../permission/entities';
import { PowerClampHourlyEntity } from '../../power-clamp/entities/power-clamp.entity';
import { PresenceSensorDailyDeviceEntity } from '../../presence-sensor/entities';
import { DeviceDto, DeviceUserPermissionDto } from '../dtos/device.dto';
import { ProductEntity } from '../../product/entities';
import { UserEntity } from '../../user/entities';
import { DeviceNotificationDto } from '../dtos';
import { PermissionTypeEntity } from '../../permission/entities';
import { SceneDeviceEntity } from '../../scene-device/entities';
import { SpaceEntity } from '../../space/entities/space.entity';
import { SubspaceEntity } from '../../space/entities/subspace/subspace.entity';
import { NewTagEntity } from '../../tag';
import { UserEntity } from '../../user/entities';
import { DeviceNotificationDto } from '../dtos';
import { DeviceDto, DeviceUserPermissionDto } from '../dtos/device.dto';
import { PowerClampHourlyEntity } from '../../power-clamp/entities/power-clamp.entity';
import { PresenceSensorDailyDeviceEntity } from '../../presence-sensor/entities';
@Entity({ name: 'device' })
@Unique(['deviceTuyaUuid'])
@ -28,11 +28,6 @@ export class DeviceEntity extends AbstractEntity<DeviceDto> {
})
deviceTuyaUuid: string;
@Column({
nullable: true,
})
deviceTuyaConstUuid: string;
@Column({
nullable: true,
default: true,
@ -83,8 +78,8 @@ export class DeviceEntity extends AbstractEntity<DeviceDto> {
@OneToMany(() => SceneDeviceEntity, (sceneDevice) => sceneDevice.device, {})
sceneDevices: SceneDeviceEntity[];
@ManyToOne(() => NewTagEntity, (tag) => tag.devices)
@JoinColumn({ name: 'tag_uuid' })
@OneToMany(() => NewTagEntity, (tag) => tag.devices)
// @JoinTable({ name: 'device_tags' })
public tag: NewTagEntity;
@OneToMany(() => PowerClampHourlyEntity, (powerClamp) => powerClamp.device)
powerClampHourly: PowerClampHourlyEntity[];
@ -116,7 +111,6 @@ export class DeviceNotificationEntity extends AbstractEntity<DeviceNotificationD
@ManyToOne(() => UserEntity, (user) => user.userPermission, {
nullable: false,
onDelete: 'CASCADE',
})
user: UserEntity;
@ -155,7 +149,6 @@ export class DeviceUserPermissionEntity extends AbstractEntity<DeviceUserPermiss
@ManyToOne(() => UserEntity, (user) => user.userPermission, {
nullable: false,
onDelete: 'CASCADE',
})
user: UserEntity;
constructor(partial: Partial<DeviceUserPermissionEntity>) {

View File

@ -1 +0,0 @@
export * from './occupancy.dto';

View File

@ -1,23 +0,0 @@
import { IsNotEmpty, IsNumber, IsString } from 'class-validator';
export class SpaceDailyOccupancyDurationDto {
@IsString()
@IsNotEmpty()
public uuid: string;
@IsString()
@IsNotEmpty()
public spaceUuid: string;
@IsString()
@IsNotEmpty()
public eventDate: string;
@IsNumber()
@IsNotEmpty()
public occupancyPercentage: number;
@IsNumber()
@IsNotEmpty()
public occupiedSeconds: number;
}
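A sketch of how a class-validator DTO shaped like the one above is typically checked before use, via the standard class-transformer/class-validator pair; the function name and error handling are illustrative.

import { plainToInstance } from 'class-transformer';
import { validate } from 'class-validator';
import { SpaceDailyOccupancyDurationDto } from './occupancy.dto';

export async function toValidatedOccupancyDto(
  payload: object,
): Promise<SpaceDailyOccupancyDurationDto> {
  const dto = plainToInstance(SpaceDailyOccupancyDurationDto, payload);
  const errors = await validate(dto);
  if (errors.length > 0) {
    // Each ValidationError lists the failed constraints per property.
    throw new Error(
      `Invalid occupancy payload: ${errors.length} field(s) failed validation`,
    );
  }
  return dto;
}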

View File

@ -1 +0,0 @@
export * from './occupancy.entity';

View File

@ -1,32 +0,0 @@
import { Column, Entity, ManyToOne, Unique } from 'typeorm';
import { AbstractEntity } from '../../abstract/entities/abstract.entity';
import { SpaceEntity } from '../../space/entities/space.entity';
import { SpaceDailyOccupancyDurationDto } from '../dtos';
@Entity({ name: 'space-daily-occupancy-duration' })
@Unique(['spaceUuid', 'eventDate'])
export class SpaceDailyOccupancyDurationEntity extends AbstractEntity<SpaceDailyOccupancyDurationDto> {
@Column({ nullable: false })
public spaceUuid: string;
@Column({ nullable: false, type: 'date' })
public eventDate: string;
public CountTotalPresenceDetected: number;
@ManyToOne(() => SpaceEntity, (space) => space.presenceSensorDaily)
space: SpaceEntity;
@Column({ type: 'int' })
occupancyPercentage: number;
@Column({ type: 'int', nullable: true })
occupiedSeconds?: number;
@Column({ type: 'int', nullable: true })
deviceCount?: number;
constructor(partial: Partial<SpaceDailyOccupancyDurationEntity>) {
super();
Object.assign(this, partial);
}
}

View File

@ -1,11 +0,0 @@
import { Module } from '@nestjs/common';
import { TypeOrmModule } from '@nestjs/typeorm';
import { SpaceDailyOccupancyDurationEntity } from './entities/occupancy.entity';
@Module({
providers: [],
exports: [],
controllers: [],
imports: [TypeOrmModule.forFeature([SpaceDailyOccupancyDurationEntity])],
})
export class SpaceDailyOccupancyDurationRepositoryModule {}

View File

@ -1 +0,0 @@
export * from './occupancy.repository';

View File

@ -1,10 +0,0 @@
import { DataSource, Repository } from 'typeorm';
import { Injectable } from '@nestjs/common';
import { SpaceDailyOccupancyDurationEntity } from '../entities/occupancy.entity';
@Injectable()
export class SpaceDailyOccupancyDurationEntityRepository extends Repository<SpaceDailyOccupancyDurationEntity> {
constructor(private dataSource: DataSource) {
super(SpaceDailyOccupancyDurationEntity, dataSource.createEntityManager());
}
}

View File

@ -1,7 +1,10 @@
import { Column, Entity, OneToMany } from 'typeorm';
import { ProductDto } from '../dtos';
import { AbstractEntity } from '../../abstract/entities/abstract.entity';
import { DeviceEntity } from '../../device/entities';
import { ProductDto } from '../dtos';
import { TagModel } from '../../space-model';
import { TagEntity } from '../../space/entities/tag.entity';
import { NewTagEntity } from '../../tag/entities';
@Entity({ name: 'product' })
export class ProductEntity extends AbstractEntity<ProductDto> {
@Column({
@ -25,6 +28,15 @@ export class ProductEntity extends AbstractEntity<ProductDto> {
})
public prodType: string;
@OneToMany(() => NewTagEntity, (tag) => tag.product, { cascade: true })
public newTags: NewTagEntity[];
@OneToMany(() => TagModel, (tag) => tag.product)
tagModels: TagModel[];
@OneToMany(() => TagEntity, (tag) => tag.product)
tags: TagEntity[];
@OneToMany(
() => DeviceEntity,
(devicesProductEntity) => devicesProductEntity.productDevice,

View File

@ -12,7 +12,6 @@ export class RoleTypeEntity extends AbstractEntity<RoleTypeDto> {
nullable: false,
enum: Object.values(RoleType),
})
// why is this ts-type string not enum?
type: string;
@OneToMany(() => UserEntity, (inviteUser) => inviteUser.roleType, {
nullable: true,

View File

@ -0,0 +1,21 @@
import { IsNotEmpty, IsString } from 'class-validator';
export class TagModelDto {
@IsString()
@IsNotEmpty()
public uuid: string;
@IsString()
@IsNotEmpty()
public name: string;
@IsString()
@IsNotEmpty()
public productUuid: string;
@IsString()
spaceModelUuid: string;
@IsString()
subspaceModelUuid: string;
}

View File

@ -1,3 +1,4 @@
export * from './space-model-product-allocation.entity';
export * from './space-model.entity';
export * from './subspace-model';
export * from './tag-model.entity';
export * from './space-model-product-allocation.entity';

View File

@ -1,12 +1,18 @@
import { Column, Entity, ManyToOne, OneToMany, Unique } from 'typeorm';
import { AbstractEntity } from '../../abstract/entities/abstract.entity';
import {
Entity,
Column,
ManyToOne,
ManyToMany,
JoinTable,
OneToMany,
} from 'typeorm';
import { SpaceModelEntity } from './space-model.entity';
import { NewTagEntity } from '../../tag/entities/tag.entity';
import { ProductEntity } from '../../product/entities/product.entity';
import { SpaceProductAllocationEntity } from '../../space/entities/space-product-allocation.entity';
import { NewTagEntity } from '../../tag/entities/tag.entity';
import { SpaceModelEntity } from './space-model.entity';
import { AbstractEntity } from '../../abstract/entities/abstract.entity';
@Entity({ name: 'space_model_product_allocation' })
@Unique(['spaceModel', 'product', 'tag'])
export class SpaceModelProductAllocationEntity extends AbstractEntity<SpaceModelProductAllocationEntity> {
@Column({
type: 'uuid',
@ -25,8 +31,9 @@ export class SpaceModelProductAllocationEntity extends AbstractEntity<SpaceModel
@ManyToOne(() => ProductEntity, { nullable: false, onDelete: 'CASCADE' })
public product: ProductEntity;
@ManyToOne(() => NewTagEntity, { nullable: true, onDelete: 'CASCADE' })
public tag: NewTagEntity;
@ManyToMany(() => NewTagEntity, { cascade: true, onDelete: 'CASCADE' })
@JoinTable({ name: 'space_model_product_tags' })
public tags: NewTagEntity[];
@OneToMany(
() => SpaceProductAllocationEntity,

View File

@ -1,10 +1,11 @@
import { Column, Entity, JoinColumn, ManyToOne, OneToMany } from 'typeorm';
import { Entity, Column, OneToMany, ManyToOne, JoinColumn } from 'typeorm';
import { AbstractEntity } from '../../abstract/entities/abstract.entity';
import { ProjectEntity } from '../../project/entities';
import { SpaceEntity } from '../../space/entities/space.entity';
import { SpaceModelDto } from '../dtos';
import { SpaceModelProductAllocationEntity } from './space-model-product-allocation.entity';
import { SubspaceModelEntity } from './subspace-model';
import { ProjectEntity } from '../../project/entities';
import { TagModel } from './tag-model.entity';
import { SpaceModelProductAllocationEntity } from './space-model-product-allocation.entity';
import { SpaceEntity } from '../../space/entities/space.entity';
@Entity({ name: 'space-model' })
export class SpaceModelEntity extends AbstractEntity<SpaceModelDto> {
@ -48,6 +49,9 @@ export class SpaceModelEntity extends AbstractEntity<SpaceModelDto> {
})
public spaces: SpaceEntity[];
@OneToMany(() => TagModel, (tag) => tag.spaceModel)
tags: TagModel[];
@OneToMany(
() => SpaceModelProductAllocationEntity,
(allocation) => allocation.spaceModel,

View File

@ -1,12 +1,11 @@
import { AbstractEntity } from '@app/common/modules/abstract/entities/abstract.entity';
import { Entity, Column, ManyToOne, ManyToMany, JoinTable } from 'typeorm';
import { SubspaceModelEntity } from './subspace-model.entity';
import { ProductEntity } from '@app/common/modules/product/entities/product.entity';
import { NewTagEntity } from '@app/common/modules/tag/entities/tag.entity';
import { Column, Entity, ManyToOne, Unique } from 'typeorm';
import { SubspaceModelProductAllocationDto } from '../../dtos/subspace-model/subspace-model-product-allocation.dto';
import { SubspaceModelEntity } from './subspace-model.entity';
import { AbstractEntity } from '@app/common/modules/abstract/entities/abstract.entity';
@Entity({ name: 'subspace_model_product_allocation' })
@Unique(['subspaceModel', 'product', 'tag'])
export class SubspaceModelProductAllocationEntity extends AbstractEntity<SubspaceModelProductAllocationDto> {
@Column({
type: 'uuid',
@ -28,8 +27,12 @@ export class SubspaceModelProductAllocationEntity extends AbstractEntity<Subspac
@ManyToOne(() => ProductEntity, { nullable: false, onDelete: 'CASCADE' })
public product: ProductEntity;
@ManyToOne(() => NewTagEntity, { nullable: true, onDelete: 'CASCADE' })
public tag: NewTagEntity;
@ManyToMany(() => NewTagEntity, (tag) => tag.subspaceModelAllocations, {
cascade: true,
onDelete: 'CASCADE',
})
@JoinTable({ name: 'subspace_model_product_tags' })
public tags: NewTagEntity[];
constructor(partial: Partial<SubspaceModelProductAllocationEntity>) {
super();

View File

@ -1,9 +1,10 @@
import { AbstractEntity } from '@app/common/modules/abstract/entities/abstract.entity';
import { SubspaceEntity } from '@app/common/modules/space/entities/subspace/subspace.entity';
import { Column, Entity, ManyToOne, OneToMany } from 'typeorm';
import { SubSpaceModelDto } from '../../dtos';
import { SpaceModelEntity } from '../space-model.entity';
import { TagModel } from '../tag-model.entity';
import { SubspaceModelProductAllocationEntity } from './subspace-model-product-allocation.entity';
import { SubspaceEntity } from '@app/common/modules/space/entities/subspace/subspace.entity';
@Entity({ name: 'subspace-model' })
export class SubspaceModelEntity extends AbstractEntity<SubSpaceModelDto> {
@ -40,6 +41,9 @@ export class SubspaceModelEntity extends AbstractEntity<SubSpaceModelDto> {
})
public disabled: boolean;
@OneToMany(() => TagModel, (tag) => tag.subspaceModel)
tags: TagModel[];
@OneToMany(
() => SubspaceModelProductAllocationEntity,
(allocation) => allocation.subspaceModel,

View File

@ -0,0 +1,38 @@
import { Column, Entity, JoinColumn, ManyToOne, OneToMany } from 'typeorm';
import { AbstractEntity } from '../../abstract/entities/abstract.entity';
import { TagModelDto } from '../dtos/tag-model.dto';
import { SpaceModelEntity } from './space-model.entity';
import { SubspaceModelEntity } from './subspace-model';
import { ProductEntity } from '../../product/entities';
import { TagEntity } from '../../space/entities/tag.entity';
@Entity({ name: 'tag_model' })
export class TagModel extends AbstractEntity<TagModelDto> {
@Column({ type: 'varchar', length: 255 })
tag: string;
@ManyToOne(() => ProductEntity, (product) => product.tagModels, {
nullable: false,
})
@JoinColumn({ name: 'product_id' })
product: ProductEntity;
@ManyToOne(() => SpaceModelEntity, (space) => space.tags, { nullable: true })
@JoinColumn({ name: 'space_model_id' })
spaceModel: SpaceModelEntity;
@ManyToOne(() => SubspaceModelEntity, (subspace) => subspace.tags, {
nullable: true,
})
@JoinColumn({ name: 'subspace_model_id' })
subspaceModel: SubspaceModelEntity;
@Column({
nullable: false,
default: false,
})
public disabled: boolean;
@OneToMany(() => TagEntity, (tag) => tag.model)
tags: TagEntity[];
}
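A sketch of creating a row for the new TagModel entity through the TagModelRepository added later in this changeset; the factory service, import paths, and tag name are assumptions.

import { Injectable } from '@nestjs/common';
import { ProductEntity } from '../product/entities';
import { SpaceModelEntity, TagModel } from './entities';
import { TagModelRepository } from './repositories';

@Injectable()
export class TagModelFactory {
  constructor(private readonly tagModelRepository: TagModelRepository) {}

  createForSpaceModel(
    product: ProductEntity,
    spaceModel: SpaceModelEntity,
    tag: string,
  ): Promise<TagModel> {
    // create() builds the instance; save() persists it and fills the
    // product_id / space_model_id join columns from the passed relations.
    const model = this.tagModelRepository.create({ tag, product, spaceModel });
    return this.tagModelRepository.save(model);
  }
}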

View File

@ -1,10 +1,11 @@
import { Injectable } from '@nestjs/common';
import { DataSource, Repository } from 'typeorm';
import { Injectable } from '@nestjs/common';
import {
SpaceModelEntity,
SpaceModelProductAllocationEntity,
SubspaceModelEntity,
SubspaceModelProductAllocationEntity,
TagModel,
} from '../entities';
@Injectable()
@ -20,6 +21,13 @@ export class SubspaceModelRepository extends Repository<SubspaceModelEntity> {
}
}
@Injectable()
export class TagModelRepository extends Repository<TagModel> {
constructor(private dataSource: DataSource) {
super(TagModel, dataSource.createEntityManager());
}
}
@Injectable()
export class SpaceModelProductAllocationRepoitory extends Repository<SpaceModelProductAllocationEntity> {
constructor(private dataSource: DataSource) {

View File

@ -1,11 +1,13 @@
import { Module } from '@nestjs/common';
import { TypeOrmModule } from '@nestjs/typeorm';
import { SpaceModelEntity, SubspaceModelEntity } from './entities';
import { SpaceModelEntity, SubspaceModelEntity, TagModel } from './entities';
import { Module } from '@nestjs/common';
@Module({
providers: [],
exports: [],
controllers: [],
imports: [TypeOrmModule.forFeature([SpaceModelEntity, SubspaceModelEntity])],
imports: [
TypeOrmModule.forFeature([SpaceModelEntity, SubspaceModelEntity, TagModel]),
],
})
export class SpaceModelRepositoryModule {}

View File

@ -0,0 +1,32 @@
import { Column, Entity, JoinColumn, ManyToOne } from 'typeorm';
import { AbstractEntity } from '../../abstract/entities/abstract.entity';
import { SpaceEntity } from './space.entity';
import { Direction } from '@app/common/constants/direction.enum';
@Entity({ name: 'space-link' })
export class SpaceLinkEntity extends AbstractEntity {
@ManyToOne(() => SpaceEntity, { nullable: false, onDelete: 'CASCADE' })
@JoinColumn({ name: 'start_space_id' })
public startSpace: SpaceEntity;
@ManyToOne(() => SpaceEntity, { nullable: false, onDelete: 'CASCADE' })
@JoinColumn({ name: 'end_space_id' })
public endSpace: SpaceEntity;
@Column({
nullable: false,
default: false,
})
public disabled: boolean;
@Column({
nullable: false,
enum: Object.values(Direction),
})
direction: string;
constructor(partial: Partial<SpaceLinkEntity>) {
super();
Object.assign(this, partial);
}
}
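A sketch of constructing one of the newly added space links; the Direction enum's members are not visible in this diff, so the value is passed through as the string the column stores.

import { SpaceEntity } from './space.entity';
import { SpaceLinkEntity } from './space-link.entity';

export function buildSpaceLink(
  startSpace: SpaceEntity,
  endSpace: SpaceEntity,
  direction: string, // expected to be one of Object.values(Direction)
): SpaceLinkEntity {
  // The Partial constructor above assigns the relations directly; 'disabled'
  // is omitted so the column default (false) applies.
  return new SpaceLinkEntity({ startSpace, endSpace, direction });
}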

View File

@ -1,13 +1,12 @@
import { Column, Entity, ManyToOne, Unique } from 'typeorm';
import { AbstractEntity } from '../../abstract/entities/abstract.entity';
import { ProductEntity } from '../../product/entities/product.entity';
import { SpaceModelProductAllocationEntity } from '../../space-model/entities/space-model-product-allocation.entity';
import { NewTagEntity } from '../../tag/entities/tag.entity';
import { SpaceProductAllocationDto } from '../dtos/space-product-allocation.dto';
import { Entity, Column, ManyToOne, ManyToMany, JoinTable } from 'typeorm';
import { SpaceEntity } from './space.entity';
import { SpaceModelProductAllocationEntity } from '../../space-model/entities/space-model-product-allocation.entity';
import { ProductEntity } from '../../product/entities/product.entity';
import { NewTagEntity } from '../../tag/entities/tag.entity';
import { AbstractEntity } from '../../abstract/entities/abstract.entity';
import { SpaceProductAllocationDto } from '../dtos/space-product-allocation.dto';
@Entity({ name: 'space_product_allocation' })
@Unique(['space', 'product', 'tag'], {})
export class SpaceProductAllocationEntity extends AbstractEntity<SpaceProductAllocationDto> {
@Column({
type: 'uuid',
@ -31,8 +30,9 @@ export class SpaceProductAllocationEntity extends AbstractEntity<SpaceProductAll
@ManyToOne(() => ProductEntity, { nullable: false, onDelete: 'CASCADE' })
public product: ProductEntity;
@ManyToOne(() => NewTagEntity, { nullable: true, onDelete: 'CASCADE' })
public tag: NewTagEntity;
@ManyToMany(() => NewTagEntity)
@JoinTable({ name: 'space_product_tags' })
public tags: NewTagEntity[];
constructor(partial: Partial<SpaceProductAllocationEntity>) {
super();

View File

@ -1,26 +1,16 @@
import {
Column,
Entity,
JoinColumn,
ManyToOne,
OneToMany,
OneToOne,
} from 'typeorm';
import { Column, Entity, JoinColumn, ManyToOne, OneToMany } from 'typeorm';
import { SpaceDto } from '../dtos';
import { AbstractEntity } from '../../abstract/entities/abstract.entity';
import { AqiSpaceDailyPollutantStatsEntity } from '../../aqi/entities';
import { BookableSpaceEntity } from '../../booking/entities/bookable-space.entity';
import { CommunityEntity } from '../../community/entities';
import { UserSpaceEntity } from '../../user/entities';
import { DeviceEntity } from '../../device/entities';
import { InviteUserSpaceEntity } from '../../Invite-user/entities';
import { SpaceDailyOccupancyDurationEntity } from '../../occupancy/entities';
import { PresenceSensorDailySpaceEntity } from '../../presence-sensor/entities';
import { CommunityEntity } from '../../community/entities';
import { SpaceLinkEntity } from './space-link.entity';
import { SceneEntity } from '../../scene/entities';
import { SpaceModelEntity } from '../../space-model';
import { UserSpaceEntity } from '../../user/entities';
import { SpaceDto } from '../dtos';
import { InviteUserSpaceEntity } from '../../Invite-user/entities';
import { SpaceProductAllocationEntity } from './space-product-allocation.entity';
import { SubspaceEntity } from './subspace/subspace.entity';
import { BookingEntity } from '../../booking/entities/booking.entity';
import { PresenceSensorDailySpaceEntity } from '../../presence-sensor/entities';
@Entity({ name: 'space' })
export class SpaceEntity extends AbstractEntity<SpaceDto> {
@ -65,12 +55,6 @@ export class SpaceEntity extends AbstractEntity<SpaceDto> {
})
public disabled: boolean;
@Column({
nullable: true,
type: Number,
})
public order?: number;
@OneToMany(() => SubspaceEntity, (subspace) => subspace.space, {
nullable: true,
})
@ -89,6 +73,16 @@ export class SpaceEntity extends AbstractEntity<SpaceDto> {
)
devices: DeviceEntity[];
@OneToMany(() => SpaceLinkEntity, (connection) => connection.startSpace, {
nullable: true,
})
public outgoingConnections: SpaceLinkEntity[];
@OneToMany(() => SpaceLinkEntity, (connection) => connection.endSpace, {
nullable: true,
})
public incomingConnections: SpaceLinkEntity[];
@Column({
nullable: true,
type: 'text',
@ -121,21 +115,6 @@ export class SpaceEntity extends AbstractEntity<SpaceDto> {
@OneToMany(() => PresenceSensorDailySpaceEntity, (sensor) => sensor.space)
presenceSensorDaily: PresenceSensorDailySpaceEntity[];
@OneToMany(() => AqiSpaceDailyPollutantStatsEntity, (aqi) => aqi.space)
aqiSensorDaily: AqiSpaceDailyPollutantStatsEntity[];
@OneToMany(
() => SpaceDailyOccupancyDurationEntity,
(occupancy) => occupancy.space,
)
occupancyDaily: SpaceDailyOccupancyDurationEntity[];
@OneToOne(() => BookableSpaceEntity, (bookable) => bookable.space)
bookableConfig: BookableSpaceEntity;
@OneToMany(() => BookingEntity, (booking) => booking.space)
bookings: BookingEntity[];
constructor(partial: Partial<SpaceEntity>) {
super();
Object.assign(this, partial);

View File

@ -1,13 +1,20 @@
import { AbstractEntity } from '@app/common/modules/abstract/entities/abstract.entity';
import {
Entity,
Column,
ManyToOne,
ManyToMany,
JoinTable,
Unique,
} from 'typeorm';
import { SubspaceEntity } from './subspace.entity';
import { ProductEntity } from '@app/common/modules/product/entities';
import { SubspaceModelProductAllocationEntity } from '@app/common/modules/space-model';
import { NewTagEntity } from '@app/common/modules/tag/entities/tag.entity';
import { Column, Entity, ManyToOne, Unique } from 'typeorm';
import { AbstractEntity } from '@app/common/modules/abstract/entities/abstract.entity';
import { SubspaceProductAllocationDto } from '../../dtos/subspace-product-allocation.dto';
import { SubspaceEntity } from './subspace.entity';
@Entity({ name: 'subspace_product_allocation' })
@Unique(['subspace', 'product', 'tag'])
@Unique(['subspace', 'product'])
export class SubspaceProductAllocationEntity extends AbstractEntity<SubspaceProductAllocationDto> {
@Column({
type: 'uuid',
@ -31,8 +38,9 @@ export class SubspaceProductAllocationEntity extends AbstractEntity<SubspaceProd
@ManyToOne(() => ProductEntity, { nullable: false, onDelete: 'CASCADE' })
public product: ProductEntity;
@ManyToOne(() => NewTagEntity, { nullable: true, onDelete: 'CASCADE' })
public tag: NewTagEntity;
@ManyToMany(() => NewTagEntity)
@JoinTable({ name: 'subspace_product_tags' })
public tags: NewTagEntity[];
constructor(partial: Partial<SubspaceProductAllocationEntity>) {
super();

View File

@ -4,6 +4,7 @@ import { SubspaceModelEntity } from '@app/common/modules/space-model';
import { Column, Entity, JoinColumn, ManyToOne, OneToMany } from 'typeorm';
import { SubspaceDto } from '../../dtos';
import { SpaceEntity } from '../space.entity';
import { TagEntity } from '../tag.entity';
import { SubspaceProductAllocationEntity } from './subspace-product-allocation.entity';
@Entity({ name: 'subspace' })
@ -42,6 +43,9 @@ export class SubspaceEntity extends AbstractEntity<SubspaceDto> {
})
subSpaceModel?: SubspaceModelEntity;
@OneToMany(() => TagEntity, (tag) => tag.subspace)
tags: TagEntity[];
@OneToMany(
() => SubspaceProductAllocationEntity,
(allocation) => allocation.subspace,

View File

@ -0,0 +1,41 @@
import { Entity, Column, ManyToOne, JoinColumn, OneToOne } from 'typeorm';
import { AbstractEntity } from '../../abstract/entities/abstract.entity';
import { ProductEntity } from '../../product/entities';
import { TagDto } from '../dtos';
import { TagModel } from '../../space-model/entities/tag-model.entity';
import { DeviceEntity } from '../../device/entities';
import { SubspaceEntity } from './subspace/subspace.entity';
@Entity({ name: 'tag' })
export class TagEntity extends AbstractEntity<TagDto> {
@Column({ type: 'varchar', length: 255, nullable: true })
tag: string;
@ManyToOne(() => TagModel, (model) => model.tags, {
nullable: true,
})
model: TagModel;
@ManyToOne(() => ProductEntity, (product) => product.tags, {
nullable: false,
})
product: ProductEntity;
@ManyToOne(() => SubspaceEntity, (subspace) => subspace.tags, {
nullable: true,
})
@JoinColumn({ name: 'subspace_id' })
subspace: SubspaceEntity;
@Column({
nullable: false,
default: false,
})
public disabled: boolean;
@OneToOne(() => DeviceEntity, (device) => device.tag, {
nullable: true,
})
@JoinColumn({ name: 'device_id' })
device: DeviceEntity;
}
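A sketch of attaching a device to one of the new tag rows through the TagRepository introduced below; it assumes the base AbstractEntity exposes a uuid column, as the rest of the module suggests, and the service name and paths are illustrative.

import { Injectable } from '@nestjs/common';
import { DeviceEntity } from '../../device/entities';
import { TagEntity } from '../entities/tag.entity';
import { TagRepository } from '../repositories';

@Injectable()
export class TagDeviceLinker {
  constructor(private readonly tagRepository: TagRepository) {}

  async attachDevice(tagUuid: string, device: DeviceEntity): Promise<TagEntity> {
    const tag = await this.tagRepository.findOneOrFail({
      where: { uuid: tagUuid }, // uuid assumed to come from AbstractEntity
    });
    // The tag owns the one-to-one join column ('device_id'), so saving the
    // tag is what persists the link.
    tag.device = device;
    return this.tagRepository.save(tag);
  }
}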

View File

@ -1,8 +1,10 @@
import { Injectable } from '@nestjs/common';
import { DataSource, Repository } from 'typeorm';
import { Injectable } from '@nestjs/common';
import { InviteSpaceEntity } from '../entities/invite-space.entity';
import { SpaceProductAllocationEntity } from '../entities/space-product-allocation.entity';
import { SpaceLinkEntity } from '../entities/space-link.entity';
import { SpaceEntity } from '../entities/space.entity';
import { TagEntity } from '../entities/tag.entity';
import { SpaceProductAllocationEntity } from '../entities/space-product-allocation.entity';
@Injectable()
export class SpaceRepository extends Repository<SpaceEntity> {
@ -11,6 +13,20 @@ export class SpaceRepository extends Repository<SpaceEntity> {
}
}
@Injectable()
export class SpaceLinkRepository extends Repository<SpaceLinkEntity> {
constructor(private dataSource: DataSource) {
super(SpaceLinkEntity, dataSource.createEntityManager());
}
}
@Injectable()
export class TagRepository extends Repository<TagEntity> {
constructor(private dataSource: DataSource) {
super(TagEntity, dataSource.createEntityManager());
}
}
@Injectable()
export class InviteSpaceRepository extends Repository<InviteSpaceEntity> {
constructor(private dataSource: DataSource) {

View File

@ -6,6 +6,7 @@ import { SpaceProductAllocationEntity } from './entities/space-product-allocatio
import { SpaceEntity } from './entities/space.entity';
import { SubspaceProductAllocationEntity } from './entities/subspace/subspace-product-allocation.entity';
import { SubspaceEntity } from './entities/subspace/subspace.entity';
import { TagEntity } from './entities/tag.entity';
@Module({
providers: [],
@ -15,6 +16,7 @@ import { SubspaceEntity } from './entities/subspace/subspace.entity';
TypeOrmModule.forFeature([
SpaceEntity,
SubspaceEntity,
TagEntity,
InviteSpaceEntity,
SpaceProductAllocationEntity,
SubspaceProductAllocationEntity,

View File

@ -1,10 +1,11 @@
import { Column, Entity, ManyToOne, OneToMany, Unique } from 'typeorm';
import { AbstractEntity } from '../../abstract/entities/abstract.entity';
import { DeviceEntity } from '../../device/entities/device.entity';
import { Entity, Column, ManyToOne, Unique, ManyToMany } from 'typeorm';
import { ProductEntity } from '../../product/entities';
import { ProjectEntity } from '../../project/entities';
import { AbstractEntity } from '../../abstract/entities/abstract.entity';
import { NewTagDto } from '../dtos/tag.dto';
import { SpaceModelProductAllocationEntity } from '../../space-model/entities/space-model-product-allocation.entity';
import { SubspaceModelProductAllocationEntity } from '../../space-model/entities/subspace-model/subspace-model-product-allocation.entity';
import { NewTagDto } from '../dtos/tag.dto';
import { DeviceEntity } from '../../device/entities/device.entity';
@Entity({ name: 'new_tag' })
@Unique(['name', 'project'])
@ -23,25 +24,31 @@ export class NewTagEntity extends AbstractEntity<NewTagDto> {
})
name: string;
@ManyToOne(() => ProductEntity, (product) => product.newTags, {
nullable: false,
onDelete: 'CASCADE',
})
public product: ProductEntity;
@ManyToOne(() => ProjectEntity, (project) => project.tags, {
nullable: false,
onDelete: 'CASCADE',
})
public project: ProjectEntity;
@OneToMany(
@ManyToMany(
() => SpaceModelProductAllocationEntity,
(allocation) => allocation.tag,
(allocation) => allocation.tags,
)
public spaceModelAllocations: SpaceModelProductAllocationEntity[];
@OneToMany(
@ManyToMany(
() => SubspaceModelProductAllocationEntity,
(allocation) => allocation.tag,
(allocation) => allocation.tags,
)
public subspaceModelAllocations: SubspaceModelProductAllocationEntity[];
@OneToMany(() => DeviceEntity, (device) => device.tag)
@ManyToOne(() => DeviceEntity, (device) => device.tag)
public devices: DeviceEntity[];
constructor(partial: Partial<NewTagEntity>) {

View File

@ -1,4 +1,3 @@
import { defaultProfilePicture } from '@app/common/constants/default.profile.picture';
import {
Column,
DeleteDateColumn,
@ -9,27 +8,27 @@ import {
OneToOne,
Unique,
} from 'typeorm';
import { OtpType } from '../../../../src/constants/otp-type.enum';
import { AbstractEntity } from '../../abstract/entities/abstract.entity';
import { ClientEntity } from '../../client/entities';
import {
DeviceNotificationEntity,
DeviceUserPermissionEntity,
} from '../../device/entities';
import { InviteUserEntity } from '../../Invite-user/entities';
import { ProjectEntity } from '../../project/entities';
import { RegionEntity } from '../../region/entities';
import { RoleTypeEntity } from '../../role-type/entities';
import { SpaceEntity } from '../../space/entities/space.entity';
import { TimeZoneEntity } from '../../timezone/entities';
import { VisitorPasswordEntity } from '../../visitor-password/entities';
import {
UserDto,
UserNotificationDto,
UserOtpDto,
UserSpaceDto,
} from '../dtos';
import { BookingEntity } from '../../booking/entities/booking.entity';
import { AbstractEntity } from '../../abstract/entities/abstract.entity';
import {
DeviceNotificationEntity,
DeviceUserPermissionEntity,
} from '../../device/entities';
import { defaultProfilePicture } from '@app/common/constants/default.profile.picture';
import { RegionEntity } from '../../region/entities';
import { TimeZoneEntity } from '../../timezone/entities';
import { OtpType } from '../../../../src/constants/otp-type.enum';
import { RoleTypeEntity } from '../../role-type/entities';
import { VisitorPasswordEntity } from '../../visitor-password/entities';
import { InviteUserEntity } from '../../Invite-user/entities';
import { ProjectEntity } from '../../project/entities';
import { SpaceEntity } from '../../space/entities/space.entity';
import { ClientEntity } from '../../client/entities';
@Entity({ name: 'user' })
export class UserEntity extends AbstractEntity<UserDto> {
@ -83,12 +82,6 @@ export class UserEntity extends AbstractEntity<UserDto> {
})
public isActive: boolean;
@Column({
nullable: true,
type: Number,
})
public bookingPoints?: number;
@Column({ default: false })
hasAcceptedWebAgreement: boolean;
@ -101,9 +94,7 @@ export class UserEntity extends AbstractEntity<UserDto> {
@Column({ type: 'timestamp', nullable: true })
appAgreementAcceptedAt: Date;
@OneToMany(() => UserSpaceEntity, (userSpace) => userSpace.user, {
onDelete: 'CASCADE',
})
@OneToMany(() => UserSpaceEntity, (userSpace) => userSpace.user)
userSpaces: UserSpaceEntity[];
@OneToMany(
@ -122,9 +113,6 @@ export class UserEntity extends AbstractEntity<UserDto> {
)
deviceUserNotification: DeviceNotificationEntity[];
@OneToMany(() => BookingEntity, (booking) => booking.user)
bookings: BookingEntity[];
@ManyToOne(() => RegionEntity, (region) => region.users, { nullable: true })
region: RegionEntity;
@ManyToOne(() => TimeZoneEntity, (timezone) => timezone.users, {
@ -170,7 +158,6 @@ export class UserEntity extends AbstractEntity<UserDto> {
export class UserNotificationEntity extends AbstractEntity<UserNotificationDto> {
@ManyToOne(() => UserEntity, (user) => user.roleType, {
nullable: false,
onDelete: 'CASCADE',
})
user: UserEntity;
@Column({
@ -232,10 +219,7 @@ export class UserSpaceEntity extends AbstractEntity<UserSpaceDto> {
})
public uuid: string;
@ManyToOne(() => UserEntity, (user) => user.userSpaces, {
nullable: false,
onDelete: 'CASCADE',
})
@ManyToOne(() => UserEntity, (user) => user.userSpaces, { nullable: false })
user: UserEntity;
@ManyToOne(() => SpaceEntity, (space) => space.userSpaces, {

View File

@ -1,7 +1,7 @@
import { Column, Entity, Index, JoinColumn, ManyToOne } from 'typeorm';
import { Column, Entity, ManyToOne, JoinColumn, Index } from 'typeorm';
import { VisitorPasswordDto } from '../dtos';
import { AbstractEntity } from '../../abstract/entities/abstract.entity';
import { UserEntity } from '../../user/entities/user.entity';
import { VisitorPasswordDto } from '../dtos';
@Entity({ name: 'visitor-password' })
@Index('IDX_PASSWORD_TUYA_UUID', ['passwordTuyaUuid'])
@ -14,7 +14,6 @@ export class VisitorPasswordEntity extends AbstractEntity<VisitorPasswordDto> {
@ManyToOne(() => UserEntity, (user) => user.visitorPasswords, {
nullable: false,
onDelete: 'CASCADE',
})
@JoinColumn({ name: 'authorizer_uuid' })
public user: UserEntity;

View File

@ -1,39 +0,0 @@
WITH params AS (
SELECT
$1::uuid AS space_uuid,
TO_DATE(NULLIF($2, ''), 'YYYY-MM') AS event_month
)
SELECT
sdp.space_uuid,
sdp.event_date,
sdp.good_aqi_percentage, sdp.moderate_aqi_percentage, sdp.unhealthy_sensitive_aqi_percentage, sdp.unhealthy_aqi_percentage,
sdp.very_unhealthy_aqi_percentage, sdp.hazardous_aqi_percentage,
sdp.daily_avg_aqi, sdp.daily_max_aqi, sdp.daily_min_aqi,
sdp.good_pm25_percentage, sdp.moderate_pm25_percentage, sdp.unhealthy_sensitive_pm25_percentage, sdp.unhealthy_pm25_percentage,
sdp.very_unhealthy_pm25_percentage, sdp.hazardous_pm25_percentage,
sdp.daily_avg_pm25, sdp.daily_max_pm25, sdp.daily_min_pm25,
sdp.good_pm10_percentage, sdp.moderate_pm10_percentage, sdp.unhealthy_sensitive_pm10_percentage, sdp.unhealthy_pm10_percentage,
sdp.very_unhealthy_pm10_percentage, sdp.hazardous_pm10_percentage,
sdp.daily_avg_pm10, sdp.daily_max_pm10, sdp.daily_min_pm10,
sdp.good_voc_percentage, sdp.moderate_voc_percentage, sdp.unhealthy_sensitive_voc_percentage, sdp.unhealthy_voc_percentage,
sdp.very_unhealthy_voc_percentage, sdp.hazardous_voc_percentage,
sdp.daily_avg_voc, sdp.daily_max_voc, sdp.daily_min_voc,
sdp.good_co2_percentage, sdp.moderate_co2_percentage, sdp.unhealthy_sensitive_co2_percentage, sdp.unhealthy_co2_percentage,
sdp.very_unhealthy_co2_percentage, sdp.hazardous_co2_percentage,
sdp.daily_avg_co2, sdp.daily_max_co2, sdp.daily_min_co2,
sdp.good_ch2o_percentage, sdp.moderate_ch2o_percentage, sdp.unhealthy_sensitive_ch2o_percentage, sdp.unhealthy_ch2o_percentage,
sdp.very_unhealthy_ch2o_percentage, sdp.hazardous_ch2o_percentage,
sdp.daily_avg_ch2o, sdp.daily_max_ch2o, sdp.daily_min_ch2o
FROM public."space-daily-pollutant-stats" AS sdp
CROSS JOIN params p
WHERE
(p.space_uuid IS NULL OR sdp.space_uuid = p.space_uuid)
AND (p.event_month IS NULL OR TO_CHAR(sdp.event_date, 'YYYY-MM') = TO_CHAR(p.event_month, 'YYYY-MM'))
ORDER BY sdp.space_uuid, sdp.event_date;
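A hedged sketch of how a parameterized .sql file like the removed one above could be executed from TypeORM; the file path, service name, and return typing are assumptions. $1 is the optional space uuid (NULL means all spaces) and $2 the optional 'YYYY-MM' month, where an empty string disables the filter via NULLIF.

import * as fs from 'fs';
import * as path from 'path';
import { Injectable } from '@nestjs/common';
import { DataSource } from 'typeorm';

@Injectable()
export class SpaceDailyPollutantStatsQuery {
  constructor(private readonly dataSource: DataSource) {}

  async fetch(spaceUuid: string | null, month: string | null): Promise<any[]> {
    const sql = fs.readFileSync(
      path.join(__dirname, 'sql/fetch_daily_space_aqi.sql'), // hypothetical path
      'utf8',
    );
    // Positional parameters map to $1 and $2 in the statement above.
    return this.dataSource.query(sql, [spaceUuid, month ?? '']);
  }
}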

View File

@ -1,376 +0,0 @@
WITH params AS (
SELECT
TO_DATE(NULLIF($1, ''), 'YYYY-MM-DD') AS event_date
),
-- Query Pipeline Starts Here
device_space AS (
SELECT
device.uuid AS device_id,
device.space_device_uuid AS space_id,
"device-status-log".event_time::timestamp AS event_time,
"device-status-log".code,
"device-status-log".value
FROM device
LEFT JOIN "device-status-log"
ON device.uuid = "device-status-log".device_id
LEFT JOIN product
ON product.uuid = device.product_device_uuid
WHERE product.cat_name = 'hjjcy'
),
average_pollutants AS (
SELECT
event_time::date AS event_date,
date_trunc('hour', event_time) AS event_hour,
space_id,
-- PM1
MIN(CASE WHEN code = 'pm1' THEN value::numeric END) AS pm1_min,
AVG(CASE WHEN code = 'pm1' THEN value::numeric END) AS pm1_avg,
MAX(CASE WHEN code = 'pm1' THEN value::numeric END) AS pm1_max,
-- PM25
MIN(CASE WHEN code = 'pm25_value' THEN value::numeric END) AS pm25_min,
AVG(CASE WHEN code = 'pm25_value' THEN value::numeric END) AS pm25_avg,
MAX(CASE WHEN code = 'pm25_value' THEN value::numeric END) AS pm25_max,
-- PM10
MIN(CASE WHEN code = 'pm10' THEN value::numeric END) AS pm10_min,
AVG(CASE WHEN code = 'pm10' THEN value::numeric END) AS pm10_avg,
MAX(CASE WHEN code = 'pm10' THEN value::numeric END) AS pm10_max,
-- VOC
MIN(CASE WHEN code = 'voc_value' THEN value::numeric END) AS voc_min,
AVG(CASE WHEN code = 'voc_value' THEN value::numeric END) AS voc_avg,
MAX(CASE WHEN code = 'voc_value' THEN value::numeric END) AS voc_max,
-- CH2O
MIN(CASE WHEN code = 'ch2o_value' THEN value::numeric END) AS ch2o_min,
AVG(CASE WHEN code = 'ch2o_value' THEN value::numeric END) AS ch2o_avg,
MAX(CASE WHEN code = 'ch2o_value' THEN value::numeric END) AS ch2o_max,
-- CO2
MIN(CASE WHEN code = 'co2_value' THEN value::numeric END) AS co2_min,
AVG(CASE WHEN code = 'co2_value' THEN value::numeric END) AS co2_avg,
MAX(CASE WHEN code = 'co2_value' THEN value::numeric END) AS co2_max
FROM device_space
GROUP BY space_id, event_hour, event_date
),
filled_pollutants AS (
SELECT
*,
-- AVG
COALESCE(pm25_avg, LAG(pm25_avg) OVER (PARTITION BY space_id ORDER BY event_hour)) AS pm25_avg_f,
COALESCE(pm10_avg, LAG(pm10_avg) OVER (PARTITION BY space_id ORDER BY event_hour)) AS pm10_avg_f,
COALESCE(voc_avg, LAG(voc_avg) OVER (PARTITION BY space_id ORDER BY event_hour)) AS voc_avg_f,
COALESCE(co2_avg, LAG(co2_avg) OVER (PARTITION BY space_id ORDER BY event_hour)) AS co2_avg_f,
COALESCE(ch2o_avg, LAG(ch2o_avg) OVER (PARTITION BY space_id ORDER BY event_hour)) AS ch2o_avg_f,
-- MIN
COALESCE(pm25_min, LAG(pm25_min) OVER (PARTITION BY space_id ORDER BY event_hour)) AS pm25_min_f,
COALESCE(pm10_min, LAG(pm10_min) OVER (PARTITION BY space_id ORDER BY event_hour)) AS pm10_min_f,
COALESCE(voc_min, LAG(voc_min) OVER (PARTITION BY space_id ORDER BY event_hour)) AS voc_min_f,
COALESCE(co2_min, LAG(co2_min) OVER (PARTITION BY space_id ORDER BY event_hour)) AS co2_min_f,
COALESCE(ch2o_min, LAG(ch2o_min) OVER (PARTITION BY space_id ORDER BY event_hour)) AS ch2o_min_f,
-- MAX
COALESCE(pm25_max, LAG(pm25_max) OVER (PARTITION BY space_id ORDER BY event_hour)) AS pm25_max_f,
COALESCE(pm10_max, LAG(pm10_max) OVER (PARTITION BY space_id ORDER BY event_hour)) AS pm10_max_f,
COALESCE(voc_max, LAG(voc_max) OVER (PARTITION BY space_id ORDER BY event_hour)) AS voc_max_f,
COALESCE(co2_max, LAG(co2_max) OVER (PARTITION BY space_id ORDER BY event_hour)) AS co2_max_f,
COALESCE(ch2o_max, LAG(ch2o_max) OVER (PARTITION BY space_id ORDER BY event_hour)) AS ch2o_max_f
FROM average_pollutants
),
hourly_results AS (
SELECT
space_id,
event_date,
event_hour,
pm1_min, pm1_avg, pm1_max,
pm25_min_f, pm25_avg_f, pm25_max_f,
pm10_min_f, pm10_avg_f, pm10_max_f,
voc_min_f, voc_avg_f, voc_max_f,
co2_min_f, co2_avg_f, co2_max_f,
ch2o_min_f, ch2o_avg_f, ch2o_max_f,
GREATEST(
calculate_aqi('pm25', pm25_min_f),
calculate_aqi('pm10', pm10_min_f)
) AS hourly_min_aqi,
GREATEST(
calculate_aqi('pm25', pm25_avg_f),
calculate_aqi('pm10', pm10_avg_f)
) AS hourly_avg_aqi,
GREATEST(
calculate_aqi('pm25', pm25_max_f),
calculate_aqi('pm10', pm10_max_f)
) AS hourly_max_aqi,
classify_aqi(GREATEST(
calculate_aqi('pm25', pm25_avg_f),
calculate_aqi('pm10', pm10_avg_f)
)) AS aqi_category,
classify_aqi(calculate_aqi('pm25',pm25_avg_f)) as pm25_category,
classify_aqi(calculate_aqi('pm10',pm10_avg_f)) as pm10_category,
classify_aqi(calculate_aqi('voc',voc_avg_f)) as voc_category,
classify_aqi(calculate_aqi('co2',co2_avg_f)) as co2_category,
classify_aqi(calculate_aqi('ch2o',ch2o_avg_f)) as ch2o_category
FROM filled_pollutants
),
daily_category_counts AS (
SELECT space_id, event_date, aqi_category AS category, 'aqi' AS pollutant, COUNT(*) AS category_count
FROM hourly_results
GROUP BY space_id, event_date, aqi_category
UNION ALL
SELECT space_id, event_date, pm25_category AS category, 'pm25' AS pollutant, COUNT(*) AS category_count
FROM hourly_results
GROUP BY space_id, event_date, pm25_category
UNION ALL
SELECT space_id, event_date, pm10_category AS category, 'pm10' AS pollutant, COUNT(*) AS category_count
FROM hourly_results
GROUP BY space_id, event_date, pm10_category
UNION ALL
SELECT space_id, event_date, voc_category AS category, 'voc' AS pollutant, COUNT(*) AS category_count
FROM hourly_results
GROUP BY space_id, event_date, voc_category
UNION ALL
SELECT space_id, event_date, co2_category AS category, 'co2' AS pollutant, COUNT(*) AS category_count
FROM hourly_results
GROUP BY space_id, event_date, co2_category
UNION ALL
SELECT space_id, event_date, ch2o_category AS category, 'ch2o' AS pollutant, COUNT(*) AS category_count
FROM hourly_results
GROUP BY space_id, event_date, ch2o_category
),
daily_totals AS (
SELECT
space_id,
event_date,
SUM(category_count) AS total_count
FROM daily_category_counts
where pollutant = 'aqi'
GROUP BY space_id, event_date
),
-- Pivot Categories into Columns
daily_percentages AS (
select
dt.space_id,
dt.event_date,
-- AQI CATEGORIES
ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Good' and dcc.pollutant = 'aqi' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS good_aqi_percentage,
ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Moderate' and dcc.pollutant = 'aqi' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS moderate_aqi_percentage,
ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Unhealthy for Sensitive Groups' and dcc.pollutant = 'aqi' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS unhealthy_sensitive_aqi_percentage,
ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Unhealthy' and dcc.pollutant = 'aqi' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS unhealthy_aqi_percentage,
ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Very Unhealthy' and dcc.pollutant = 'aqi' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS very_unhealthy_aqi_percentage,
ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Hazardous' and dcc.pollutant = 'aqi' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS hazardous_aqi_percentage,
-- PM25 CATEGORIES
ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Good' and dcc.pollutant = 'pm25' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS good_pm25_percentage,
ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Moderate' and dcc.pollutant = 'pm25' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS moderate_pm25_percentage,
ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Unhealthy for Sensitive Groups' and dcc.pollutant = 'pm25' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS unhealthy_sensitive_pm25_percentage,
ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Unhealthy' and dcc.pollutant = 'pm25' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS unhealthy_pm25_percentage,
ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Very Unhealthy' and dcc.pollutant = 'pm25' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS very_unhealthy_pm25_percentage,
ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Hazardous' and dcc.pollutant = 'pm25' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS hazardous_pm25_percentage,
-- PM10 CATEGORIES
ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Good' and dcc.pollutant = 'pm10' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS good_pm10_percentage,
ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Moderate' and dcc.pollutant = 'pm10' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS moderate_pm10_percentage,
ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Unhealthy for Sensitive Groups' and dcc.pollutant = 'pm10' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS unhealthy_sensitive_pm10_percentage,
ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Unhealthy' and dcc.pollutant = 'pm10' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS unhealthy_pm10_percentage,
ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Very Unhealthy' and dcc.pollutant = 'pm10' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS very_unhealthy_pm10_percentage,
ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Hazardous' and dcc.pollutant = 'pm10' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS hazardous_pm10_percentage,
-- VOC CATEGORIES
ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Good' and dcc.pollutant = 'voc' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS good_voc_percentage,
ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Moderate' and dcc.pollutant = 'voc' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS moderate_voc_percentage,
ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Unhealthy for Sensitive Groups' and dcc.pollutant = 'voc' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS unhealthy_sensitive_voc_percentage,
ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Unhealthy' and dcc.pollutant = 'voc' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS unhealthy_voc_percentage,
ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Very Unhealthy' and dcc.pollutant = 'voc' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS very_unhealthy_voc_percentage,
ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Hazardous' and dcc.pollutant = 'voc' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS hazardous_voc_percentage,
-- CO2 CATEGORIES
ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Good' and dcc.pollutant = 'co2' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS good_co2_percentage,
ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Moderate' and dcc.pollutant = 'co2' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS moderate_co2_percentage,
ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Unhealthy for Sensitive Groups' and dcc.pollutant = 'co2' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS unhealthy_sensitive_co2_percentage,
ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Unhealthy' and dcc.pollutant = 'co2' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS unhealthy_co2_percentage,
ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Very Unhealthy' and dcc.pollutant = 'co2' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS very_unhealthy_co2_percentage,
ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Hazardous' and dcc.pollutant = 'co2' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS hazardous_co2_percentage,
-- CH20 CATEGORIES
ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Good' and dcc.pollutant = 'ch2o' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS good_ch2o_percentage,
ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Moderate' and dcc.pollutant = 'ch2o' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS moderate_ch2o_percentage,
ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Unhealthy for Sensitive Groups' and dcc.pollutant = 'ch2o' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS unhealthy_sensitive_ch2o_percentage,
ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Unhealthy' and dcc.pollutant = 'ch2o' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS unhealthy_ch2o_percentage,
ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Very Unhealthy' and dcc.pollutant = 'ch2o' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS very_unhealthy_ch2o_percentage,
ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Hazardous' and dcc.pollutant = 'ch2o' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS hazardous_ch2o_percentage
FROM daily_totals dt
LEFT JOIN daily_category_counts dcc
ON dt.space_id = dcc.space_id AND dt.event_date = dcc.event_date
GROUP BY dt.space_id, dt.event_date, dt.total_count
),
daily_averages AS (
SELECT
space_id,
event_date,
-- AQI
ROUND(AVG(hourly_min_aqi)::numeric, 2) AS daily_min_aqi,
ROUND(AVG(hourly_avg_aqi)::numeric, 2) AS daily_avg_aqi,
ROUND(AVG(hourly_max_aqi)::numeric, 2) AS daily_max_aqi,
-- PM25
ROUND(AVG(pm25_min_f)::numeric, 2) AS daily_min_pm25,
ROUND(AVG(pm25_avg_f)::numeric, 2) AS daily_avg_pm25,
ROUND(AVG(pm25_max_f)::numeric, 2) AS daily_max_pm25,
-- PM10
ROUND(AVG(pm10_min_f)::numeric, 2) AS daily_min_pm10,
ROUND(AVG(pm10_avg_f)::numeric, 2) AS daily_avg_pm10,
ROUND(AVG(pm10_max_f)::numeric, 2) AS daily_max_pm10,
-- VOC
ROUND(AVG(voc_min_f)::numeric, 2) AS daily_min_voc,
ROUND(AVG(voc_avg_f)::numeric, 2) AS daily_avg_voc,
ROUND(AVG(voc_max_f)::numeric, 2) AS daily_max_voc,
-- CO2
ROUND(AVG(co2_min_f)::numeric, 2) AS daily_min_co2,
ROUND(AVG(co2_avg_f)::numeric, 2) AS daily_avg_co2,
ROUND(AVG(co2_max_f)::numeric, 2) AS daily_max_co2,
-- CH2O
ROUND(AVG(ch2o_min_f)::numeric, 2) AS daily_min_ch2o,
ROUND(AVG(ch2o_avg_f)::numeric, 2) AS daily_avg_ch2o,
ROUND(AVG(ch2o_max_f)::numeric, 2) AS daily_max_ch2o
FROM hourly_results
GROUP BY space_id, event_date
),
final_data as(
SELECT
p.space_id,
p.event_date,
p.good_aqi_percentage, p.moderate_aqi_percentage, p.unhealthy_sensitive_aqi_percentage, p.unhealthy_aqi_percentage, p.very_unhealthy_aqi_percentage, p.hazardous_aqi_percentage,
a.daily_avg_aqi,a.daily_max_aqi, a.daily_min_aqi,
p.good_pm25_percentage, p.moderate_pm25_percentage, p.unhealthy_sensitive_pm25_percentage, p.unhealthy_pm25_percentage, p.very_unhealthy_pm25_percentage, p.hazardous_pm25_percentage,
a.daily_avg_pm25,a.daily_max_pm25, a.daily_min_pm25,
p.good_pm10_percentage, p.moderate_pm10_percentage, p.unhealthy_sensitive_pm10_percentage, p.unhealthy_pm10_percentage, p.very_unhealthy_pm10_percentage, p.hazardous_pm10_percentage,
a.daily_avg_pm10, a.daily_max_pm10, a.daily_min_pm10,
p.good_voc_percentage, p.moderate_voc_percentage, p.unhealthy_sensitive_voc_percentage, p.unhealthy_voc_percentage, p.very_unhealthy_voc_percentage, p.hazardous_voc_percentage,
a.daily_avg_voc, a.daily_max_voc, a.daily_min_voc,
p.good_co2_percentage, p.moderate_co2_percentage, p.unhealthy_sensitive_co2_percentage, p.unhealthy_co2_percentage, p.very_unhealthy_co2_percentage, p.hazardous_co2_percentage,
a.daily_avg_co2,a.daily_max_co2, a.daily_min_co2,
p.good_ch2o_percentage, p.moderate_ch2o_percentage, p.unhealthy_sensitive_ch2o_percentage, p.unhealthy_ch2o_percentage, p.very_unhealthy_ch2o_percentage, p.hazardous_ch2o_percentage,
a.daily_avg_ch2o,a.daily_max_ch2o, a.daily_min_ch2o
FROM daily_percentages p
LEFT JOIN daily_averages a
ON p.space_id = a.space_id
AND p.event_date = a.event_date
JOIN params
ON params.event_date = a.event_date
ORDER BY p.space_id, p.event_date)
INSERT INTO public."space-daily-pollutant-stats" (
space_uuid,
event_date,
good_aqi_percentage, moderate_aqi_percentage, unhealthy_sensitive_aqi_percentage, unhealthy_aqi_percentage, very_unhealthy_aqi_percentage, hazardous_aqi_percentage,
daily_avg_aqi, daily_max_aqi, daily_min_aqi,
good_pm25_percentage, moderate_pm25_percentage, unhealthy_sensitive_pm25_percentage, unhealthy_pm25_percentage, very_unhealthy_pm25_percentage, hazardous_pm25_percentage,
daily_avg_pm25, daily_max_pm25, daily_min_pm25,
good_pm10_percentage, moderate_pm10_percentage, unhealthy_sensitive_pm10_percentage, unhealthy_pm10_percentage, very_unhealthy_pm10_percentage, hazardous_pm10_percentage,
daily_avg_pm10, daily_max_pm10, daily_min_pm10,
good_voc_percentage, moderate_voc_percentage, unhealthy_sensitive_voc_percentage, unhealthy_voc_percentage, very_unhealthy_voc_percentage, hazardous_voc_percentage,
daily_avg_voc, daily_max_voc, daily_min_voc,
good_co2_percentage, moderate_co2_percentage, unhealthy_sensitive_co2_percentage, unhealthy_co2_percentage, very_unhealthy_co2_percentage, hazardous_co2_percentage,
daily_avg_co2, daily_max_co2, daily_min_co2,
good_ch2o_percentage, moderate_ch2o_percentage, unhealthy_sensitive_ch2o_percentage, unhealthy_ch2o_percentage, very_unhealthy_ch2o_percentage, hazardous_ch2o_percentage,
daily_avg_ch2o, daily_max_ch2o, daily_min_ch2o
)
SELECT
space_id,
event_date,
good_aqi_percentage, moderate_aqi_percentage, unhealthy_sensitive_aqi_percentage, unhealthy_aqi_percentage, very_unhealthy_aqi_percentage, hazardous_aqi_percentage,
daily_avg_aqi, daily_max_aqi, daily_min_aqi,
good_pm25_percentage, moderate_pm25_percentage, unhealthy_sensitive_pm25_percentage, unhealthy_pm25_percentage, very_unhealthy_pm25_percentage, hazardous_pm25_percentage,
daily_avg_pm25, daily_max_pm25, daily_min_pm25,
good_pm10_percentage, moderate_pm10_percentage, unhealthy_sensitive_pm10_percentage, unhealthy_pm10_percentage, very_unhealthy_pm10_percentage, hazardous_pm10_percentage,
daily_avg_pm10, daily_max_pm10, daily_min_pm10,
good_voc_percentage, moderate_voc_percentage, unhealthy_sensitive_voc_percentage, unhealthy_voc_percentage, very_unhealthy_voc_percentage, hazardous_voc_percentage,
daily_avg_voc, daily_max_voc, daily_min_voc,
good_co2_percentage, moderate_co2_percentage, unhealthy_sensitive_co2_percentage, unhealthy_co2_percentage, very_unhealthy_co2_percentage, hazardous_co2_percentage,
daily_avg_co2, daily_max_co2, daily_min_co2,
good_ch2o_percentage, moderate_ch2o_percentage, unhealthy_sensitive_ch2o_percentage, unhealthy_ch2o_percentage, very_unhealthy_ch2o_percentage, hazardous_ch2o_percentage,
daily_avg_ch2o, daily_max_ch2o, daily_min_ch2o
FROM final_data
ON CONFLICT (space_uuid, event_date) DO UPDATE
SET
good_aqi_percentage = EXCLUDED.good_aqi_percentage,
moderate_aqi_percentage = EXCLUDED.moderate_aqi_percentage,
unhealthy_sensitive_aqi_percentage = EXCLUDED.unhealthy_sensitive_aqi_percentage,
unhealthy_aqi_percentage = EXCLUDED.unhealthy_aqi_percentage,
very_unhealthy_aqi_percentage = EXCLUDED.very_unhealthy_aqi_percentage,
hazardous_aqi_percentage = EXCLUDED.hazardous_aqi_percentage,
daily_avg_aqi = EXCLUDED.daily_avg_aqi,
daily_max_aqi = EXCLUDED.daily_max_aqi,
daily_min_aqi = EXCLUDED.daily_min_aqi,
good_pm25_percentage = EXCLUDED.good_pm25_percentage,
moderate_pm25_percentage = EXCLUDED.moderate_pm25_percentage,
unhealthy_sensitive_pm25_percentage = EXCLUDED.unhealthy_sensitive_pm25_percentage,
unhealthy_pm25_percentage = EXCLUDED.unhealthy_pm25_percentage,
very_unhealthy_pm25_percentage = EXCLUDED.very_unhealthy_pm25_percentage,
hazardous_pm25_percentage = EXCLUDED.hazardous_pm25_percentage,
daily_avg_pm25 = EXCLUDED.daily_avg_pm25,
daily_max_pm25 = EXCLUDED.daily_max_pm25,
daily_min_pm25 = EXCLUDED.daily_min_pm25,
good_pm10_percentage = EXCLUDED.good_pm10_percentage,
moderate_pm10_percentage = EXCLUDED.moderate_pm10_percentage,
unhealthy_sensitive_pm10_percentage = EXCLUDED.unhealthy_sensitive_pm10_percentage,
unhealthy_pm10_percentage = EXCLUDED.unhealthy_pm10_percentage,
very_unhealthy_pm10_percentage = EXCLUDED.very_unhealthy_pm10_percentage,
hazardous_pm10_percentage = EXCLUDED.hazardous_pm10_percentage,
daily_avg_pm10 = EXCLUDED.daily_avg_pm10,
daily_max_pm10 = EXCLUDED.daily_max_pm10,
daily_min_pm10 = EXCLUDED.daily_min_pm10,
good_voc_percentage = EXCLUDED.good_voc_percentage,
moderate_voc_percentage = EXCLUDED.moderate_voc_percentage,
unhealthy_sensitive_voc_percentage = EXCLUDED.unhealthy_sensitive_voc_percentage,
unhealthy_voc_percentage = EXCLUDED.unhealthy_voc_percentage,
very_unhealthy_voc_percentage = EXCLUDED.very_unhealthy_voc_percentage,
hazardous_voc_percentage = EXCLUDED.hazardous_voc_percentage,
daily_avg_voc = EXCLUDED.daily_avg_voc,
daily_max_voc = EXCLUDED.daily_max_voc,
daily_min_voc = EXCLUDED.daily_min_voc,
good_co2_percentage = EXCLUDED.good_co2_percentage,
moderate_co2_percentage = EXCLUDED.moderate_co2_percentage,
unhealthy_sensitive_co2_percentage = EXCLUDED.unhealthy_sensitive_co2_percentage,
unhealthy_co2_percentage = EXCLUDED.unhealthy_co2_percentage,
very_unhealthy_co2_percentage = EXCLUDED.very_unhealthy_co2_percentage,
hazardous_co2_percentage = EXCLUDED.hazardous_co2_percentage,
daily_avg_co2 = EXCLUDED.daily_avg_co2,
daily_max_co2 = EXCLUDED.daily_max_co2,
daily_min_co2 = EXCLUDED.daily_min_co2,
good_ch2o_percentage = EXCLUDED.good_ch2o_percentage,
moderate_ch2o_percentage = EXCLUDED.moderate_ch2o_percentage,
unhealthy_sensitive_ch2o_percentage = EXCLUDED.unhealthy_sensitive_ch2o_percentage,
unhealthy_ch2o_percentage = EXCLUDED.unhealthy_ch2o_percentage,
very_unhealthy_ch2o_percentage = EXCLUDED.very_unhealthy_ch2o_percentage,
hazardous_ch2o_percentage = EXCLUDED.hazardous_ch2o_percentage,
daily_avg_ch2o = EXCLUDED.daily_avg_ch2o,
daily_max_ch2o = EXCLUDED.daily_max_ch2o,
daily_min_ch2o = EXCLUDED.daily_min_ch2o;
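
The INSERT ... ON CONFLICT (space_uuid, event_date) DO UPDATE above keeps exactly one row per space per day and overwrites that row's aggregates when the pipeline is re-run for the same date. A minimal, self-contained sketch of the same upsert behaviour, using a throwaway table and hypothetical values rather than the real "space-daily-pollutant-stats" schema:

-- Minimal sketch of the upsert pattern used above (hypothetical table and values).
CREATE TEMP TABLE demo_daily_stats (
    space_uuid    uuid NOT NULL,
    event_date    date NOT NULL,
    daily_avg_aqi numeric,
    PRIMARY KEY (space_uuid, event_date)
);

-- First run inserts the row for the day.
INSERT INTO demo_daily_stats (space_uuid, event_date, daily_avg_aqi)
VALUES ('00000000-0000-0000-0000-000000000001', DATE '2025-05-27', 42.0)
ON CONFLICT (space_uuid, event_date) DO UPDATE
SET daily_avg_aqi = EXCLUDED.daily_avg_aqi;

-- A re-run for the same space and date updates the existing row instead of failing.
INSERT INTO demo_daily_stats (space_uuid, event_date, daily_avg_aqi)
VALUES ('00000000-0000-0000-0000-000000000001', DATE '2025-05-27', 55.0)
ON CONFLICT (space_uuid, event_date) DO UPDATE
SET daily_avg_aqi = EXCLUDED.daily_avg_aqi;

ON CONFLICT relies on a unique constraint over (space_uuid, event_date); the real table is assumed to carry the same constraint, since the statement above depends on it.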

View File

@ -1,367 +0,0 @@
-- Query Pipeline Starts Here
WITH device_space AS (
SELECT
device.uuid AS device_id,
device.space_device_uuid AS space_id,
"device-status-log".event_time::timestamp AS event_time,
"device-status-log".code,
"device-status-log".value
FROM device
LEFT JOIN "device-status-log"
ON device.uuid = "device-status-log".device_id
LEFT JOIN product
ON product.uuid = device.product_device_uuid
WHERE product.cat_name = 'hjjcy'
),
average_pollutants AS (
SELECT
event_time::date AS event_date,
date_trunc('hour', event_time) AS event_hour,
space_id,
-- PM1
MIN(CASE WHEN code = 'pm1' THEN value::numeric END) AS pm1_min,
AVG(CASE WHEN code = 'pm1' THEN value::numeric END) AS pm1_avg,
MAX(CASE WHEN code = 'pm1' THEN value::numeric END) AS pm1_max,
-- PM25
MIN(CASE WHEN code = 'pm25_value' THEN value::numeric END) AS pm25_min,
AVG(CASE WHEN code = 'pm25_value' THEN value::numeric END) AS pm25_avg,
MAX(CASE WHEN code = 'pm25_value' THEN value::numeric END) AS pm25_max,
-- PM10
MIN(CASE WHEN code = 'pm10' THEN value::numeric END) AS pm10_min,
AVG(CASE WHEN code = 'pm10' THEN value::numeric END) AS pm10_avg,
MAX(CASE WHEN code = 'pm10' THEN value::numeric END) AS pm10_max,
-- VOC
MIN(CASE WHEN code = 'voc_value' THEN value::numeric END) AS voc_min,
AVG(CASE WHEN code = 'voc_value' THEN value::numeric END) AS voc_avg,
MAX(CASE WHEN code = 'voc_value' THEN value::numeric END) AS voc_max,
-- CH2O
MIN(CASE WHEN code = 'ch2o_value' THEN value::numeric END) AS ch2o_min,
AVG(CASE WHEN code = 'ch2o_value' THEN value::numeric END) AS ch2o_avg,
MAX(CASE WHEN code = 'ch2o_value' THEN value::numeric END) AS ch2o_max,
-- CO2
MIN(CASE WHEN code = 'co2_value' THEN value::numeric END) AS co2_min,
AVG(CASE WHEN code = 'co2_value' THEN value::numeric END) AS co2_avg,
MAX(CASE WHEN code = 'co2_value' THEN value::numeric END) AS co2_max
FROM device_space
GROUP BY space_id, event_hour, event_date
),
filled_pollutants AS (
SELECT
*,
-- AVG
COALESCE(pm25_avg, LAG(pm25_avg) OVER (PARTITION BY space_id ORDER BY event_hour)) AS pm25_avg_f,
COALESCE(pm10_avg, LAG(pm10_avg) OVER (PARTITION BY space_id ORDER BY event_hour)) AS pm10_avg_f,
COALESCE(voc_avg, LAG(voc_avg) OVER (PARTITION BY space_id ORDER BY event_hour)) AS voc_avg_f,
COALESCE(co2_avg, LAG(co2_avg) OVER (PARTITION BY space_id ORDER BY event_hour)) AS co2_avg_f,
COALESCE(ch2o_avg, LAG(ch2o_avg) OVER (PARTITION BY space_id ORDER BY event_hour)) AS ch2o_avg_f,
-- MIN
COALESCE(pm25_min, LAG(pm25_min) OVER (PARTITION BY space_id ORDER BY event_hour)) AS pm25_min_f,
COALESCE(pm10_min, LAG(pm10_min) OVER (PARTITION BY space_id ORDER BY event_hour)) AS pm10_min_f,
COALESCE(voc_min, LAG(voc_min) OVER (PARTITION BY space_id ORDER BY event_hour)) AS voc_min_f,
COALESCE(co2_min, LAG(co2_min) OVER (PARTITION BY space_id ORDER BY event_hour)) AS co2_min_f,
COALESCE(ch2o_min, LAG(ch2o_min) OVER (PARTITION BY space_id ORDER BY event_hour)) AS ch2o_min_f,
-- MAX
COALESCE(pm25_max, LAG(pm25_max) OVER (PARTITION BY space_id ORDER BY event_hour)) AS pm25_max_f,
COALESCE(pm10_max, LAG(pm10_max) OVER (PARTITION BY space_id ORDER BY event_hour)) AS pm10_max_f,
COALESCE(voc_max, LAG(voc_max) OVER (PARTITION BY space_id ORDER BY event_hour)) AS voc_max_f,
COALESCE(co2_max, LAG(co2_max) OVER (PARTITION BY space_id ORDER BY event_hour)) AS co2_max_f,
COALESCE(ch2o_max, LAG(ch2o_max) OVER (PARTITION BY space_id ORDER BY event_hour)) AS ch2o_max_f
FROM average_pollutants
),
hourly_results AS (
SELECT
space_id,
event_date,
event_hour,
pm1_min, pm1_avg, pm1_max,
pm25_min_f, pm25_avg_f, pm25_max_f,
pm10_min_f, pm10_avg_f, pm10_max_f,
voc_min_f, voc_avg_f, voc_max_f,
co2_min_f, co2_avg_f, co2_max_f,
ch2o_min_f, ch2o_avg_f, ch2o_max_f,
GREATEST(
calculate_aqi('pm25', pm25_min_f),
calculate_aqi('pm10', pm10_min_f)
) AS hourly_min_aqi,
GREATEST(
calculate_aqi('pm25', pm25_avg_f),
calculate_aqi('pm10', pm10_avg_f)
) AS hourly_avg_aqi,
GREATEST(
calculate_aqi('pm25', pm25_max_f),
calculate_aqi('pm10', pm10_max_f)
) AS hourly_max_aqi,
classify_aqi(GREATEST(
calculate_aqi('pm25', pm25_avg_f),
calculate_aqi('pm10', pm10_avg_f)
)) AS aqi_category,
classify_aqi(calculate_aqi('pm25',pm25_avg_f)) as pm25_category,
classify_aqi(calculate_aqi('pm10',pm10_avg_f)) as pm10_category,
classify_aqi(calculate_aqi('voc',voc_avg_f)) as voc_category,
classify_aqi(calculate_aqi('co2',co2_avg_f)) as co2_category,
classify_aqi(calculate_aqi('ch2o',ch2o_avg_f)) as ch2o_category
FROM filled_pollutants
),
daily_category_counts AS (
SELECT space_id, event_date, aqi_category AS category, 'aqi' AS pollutant, COUNT(*) AS category_count
FROM hourly_results
GROUP BY space_id, event_date, aqi_category
UNION ALL
SELECT space_id, event_date, pm25_category AS category, 'pm25' AS pollutant, COUNT(*) AS category_count
FROM hourly_results
GROUP BY space_id, event_date, pm25_category
UNION ALL
SELECT space_id, event_date, pm10_category AS category, 'pm10' AS pollutant, COUNT(*) AS category_count
FROM hourly_results
GROUP BY space_id, event_date, pm10_category
UNION ALL
SELECT space_id, event_date, voc_category AS category, 'voc' AS pollutant, COUNT(*) AS category_count
FROM hourly_results
GROUP BY space_id, event_date, voc_category
UNION ALL
SELECT space_id, event_date, co2_category AS category, 'co2' AS pollutant, COUNT(*) AS category_count
FROM hourly_results
GROUP BY space_id, event_date, co2_category
UNION ALL
SELECT space_id, event_date, ch2o_category AS category, 'ch2o' AS pollutant, COUNT(*) AS category_count
FROM hourly_results
GROUP BY space_id, event_date, ch2o_category
),
daily_totals AS (
SELECT
space_id,
event_date,
SUM(category_count) AS total_count
FROM daily_category_counts
where pollutant = 'aqi'
GROUP BY space_id, event_date
),
-- Pivot Categories into Columns
daily_percentages AS (
select
dt.space_id,
dt.event_date,
-- AQI CATEGORIES
ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Good' and dcc.pollutant = 'aqi' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS good_aqi_percentage,
ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Moderate' and dcc.pollutant = 'aqi' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS moderate_aqi_percentage,
ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Unhealthy for Sensitive Groups' and dcc.pollutant = 'aqi' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS unhealthy_sensitive_aqi_percentage,
ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Unhealthy' and dcc.pollutant = 'aqi' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS unhealthy_aqi_percentage,
ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Very Unhealthy' and dcc.pollutant = 'aqi' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS very_unhealthy_aqi_percentage,
ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Hazardous' and dcc.pollutant = 'aqi' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS hazardous_aqi_percentage,
-- PM25 CATEGORIES
ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Good' and dcc.pollutant = 'pm25' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS good_pm25_percentage,
ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Moderate' and dcc.pollutant = 'pm25' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS moderate_pm25_percentage,
ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Unhealthy for Sensitive Groups' and dcc.pollutant = 'pm25' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS unhealthy_sensitive_pm25_percentage,
ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Unhealthy' and dcc.pollutant = 'pm25' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS unhealthy_pm25_percentage,
ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Very Unhealthy' and dcc.pollutant = 'pm25' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS very_unhealthy_pm25_percentage,
ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Hazardous' and dcc.pollutant = 'pm25' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS hazardous_pm25_percentage,
-- PM10 CATEGORIES
ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Good' and dcc.pollutant = 'pm10' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS good_pm10_percentage,
ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Moderate' and dcc.pollutant = 'pm10' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS moderate_pm10_percentage,
ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Unhealthy for Sensitive Groups' and dcc.pollutant = 'pm10' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS unhealthy_sensitive_pm10_percentage,
ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Unhealthy' and dcc.pollutant = 'pm10' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS unhealthy_pm10_percentage,
ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Very Unhealthy' and dcc.pollutant = 'pm10' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS very_unhealthy_pm10_percentage,
ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Hazardous' and dcc.pollutant = 'pm10' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS hazardous_pm10_percentage,
-- VOC CATEGORIES
ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Good' and dcc.pollutant = 'voc' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS good_voc_percentage,
ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Moderate' and dcc.pollutant = 'voc' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS moderate_voc_percentage,
ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Unhealthy for Sensitive Groups' and dcc.pollutant = 'voc' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS unhealthy_sensitive_voc_percentage,
ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Unhealthy' and dcc.pollutant = 'voc' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS unhealthy_voc_percentage,
ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Very Unhealthy' and dcc.pollutant = 'voc' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS very_unhealthy_voc_percentage,
ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Hazardous' and dcc.pollutant = 'voc' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS hazardous_voc_percentage,
-- CO2 CATEGORIES
ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Good' and dcc.pollutant = 'co2' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS good_co2_percentage,
ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Moderate' and dcc.pollutant = 'co2' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS moderate_co2_percentage,
ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Unhealthy for Sensitive Groups' and dcc.pollutant = 'co2' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS unhealthy_sensitive_co2_percentage,
ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Unhealthy' and dcc.pollutant = 'co2' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS unhealthy_co2_percentage,
ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Very Unhealthy' and dcc.pollutant = 'co2' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS very_unhealthy_co2_percentage,
ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Hazardous' and dcc.pollutant = 'co2' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS hazardous_co2_percentage,
-- CH2O CATEGORIES
ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Good' and dcc.pollutant = 'ch2o' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS good_ch2o_percentage,
ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Moderate' and dcc.pollutant = 'ch2o' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS moderate_ch2o_percentage,
ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Unhealthy for Sensitive Groups' and dcc.pollutant = 'ch2o' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS unhealthy_sensitive_ch2o_percentage,
ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Unhealthy' and dcc.pollutant = 'ch2o' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS unhealthy_ch2o_percentage,
ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Very Unhealthy' and dcc.pollutant = 'ch2o' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS very_unhealthy_ch2o_percentage,
ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Hazardous' and dcc.pollutant = 'ch2o' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS hazardous_ch2o_percentage
FROM daily_totals dt
LEFT JOIN daily_category_counts dcc
ON dt.space_id = dcc.space_id AND dt.event_date = dcc.event_date
GROUP BY dt.space_id, dt.event_date, dt.total_count
),
daily_averages AS (
SELECT
space_id,
event_date,
-- AQI
ROUND(AVG(hourly_min_aqi)::numeric, 2) AS daily_min_aqi,
ROUND(AVG(hourly_avg_aqi)::numeric, 2) AS daily_avg_aqi,
ROUND(AVG(hourly_max_aqi)::numeric, 2) AS daily_max_aqi,
-- PM25
ROUND(AVG(pm25_min_f)::numeric, 2) AS daily_min_pm25,
ROUND(AVG(pm25_avg_f)::numeric, 2) AS daily_avg_pm25,
ROUND(AVG(pm25_max_f)::numeric, 2) AS daily_max_pm25,
-- PM10
ROUND(AVG(pm10_min_f)::numeric, 2) AS daily_min_pm10,
ROUND(AVG(pm10_avg_f)::numeric, 2) AS daily_avg_pm10,
ROUND(AVG(pm10_max_f)::numeric, 2) AS daily_max_pm10,
-- VOC
ROUND(AVG(voc_min_f)::numeric, 2) AS daily_min_voc,
ROUND(AVG(voc_avg_f)::numeric, 2) AS daily_avg_voc,
ROUND(AVG(voc_max_f)::numeric, 2) AS daily_max_voc,
-- CO2
ROUND(AVG(co2_min_f)::numeric, 2) AS daily_min_co2,
ROUND(AVG(co2_avg_f)::numeric, 2) AS daily_avg_co2,
ROUND(AVG(co2_max_f)::numeric, 2) AS daily_max_co2,
-- CH2O
ROUND(AVG(ch2o_min_f)::numeric, 2) AS daily_min_ch2o,
ROUND(AVG(ch2o_avg_f)::numeric, 2) AS daily_avg_ch2o,
ROUND(AVG(ch2o_max_f)::numeric, 2) AS daily_max_ch2o
FROM hourly_results
GROUP BY space_id, event_date
),
final_data as(
SELECT
p.space_id,
p.event_date,
p.good_aqi_percentage, p.moderate_aqi_percentage, p.unhealthy_sensitive_aqi_percentage, p.unhealthy_aqi_percentage, p.very_unhealthy_aqi_percentage, p.hazardous_aqi_percentage,
a.daily_avg_aqi,a.daily_max_aqi, a.daily_min_aqi,
p.good_pm25_percentage, p.moderate_pm25_percentage, p.unhealthy_sensitive_pm25_percentage, p.unhealthy_pm25_percentage, p.very_unhealthy_pm25_percentage, p.hazardous_pm25_percentage,
a.daily_avg_pm25,a.daily_max_pm25, a.daily_min_pm25,
p.good_pm10_percentage, p.moderate_pm10_percentage, p.unhealthy_sensitive_pm10_percentage, p.unhealthy_pm10_percentage, p.very_unhealthy_pm10_percentage, p.hazardous_pm10_percentage,
a.daily_avg_pm10, a.daily_max_pm10, a.daily_min_pm10,
p.good_voc_percentage, p.moderate_voc_percentage, p.unhealthy_sensitive_voc_percentage, p.unhealthy_voc_percentage, p.very_unhealthy_voc_percentage, p.hazardous_voc_percentage,
a.daily_avg_voc, a.daily_max_voc, a.daily_min_voc,
p.good_co2_percentage, p.moderate_co2_percentage, p.unhealthy_sensitive_co2_percentage, p.unhealthy_co2_percentage, p.very_unhealthy_co2_percentage, p.hazardous_co2_percentage,
a.daily_avg_co2,a.daily_max_co2, a.daily_min_co2,
p.good_ch2o_percentage, p.moderate_ch2o_percentage, p.unhealthy_sensitive_ch2o_percentage, p.unhealthy_ch2o_percentage, p.very_unhealthy_ch2o_percentage, p.hazardous_ch2o_percentage,
a.daily_avg_ch2o,a.daily_max_ch2o, a.daily_min_ch2o
FROM daily_percentages p
LEFT JOIN daily_averages a
ON p.space_id = a.space_id AND p.event_date = a.event_date
ORDER BY p.space_id, p.event_date)
INSERT INTO public."space-daily-pollutant-stats" (
space_uuid,
event_date,
good_aqi_percentage, moderate_aqi_percentage, unhealthy_sensitive_aqi_percentage, unhealthy_aqi_percentage, very_unhealthy_aqi_percentage, hazardous_aqi_percentage,
daily_avg_aqi, daily_max_aqi, daily_min_aqi,
good_pm25_percentage, moderate_pm25_percentage, unhealthy_sensitive_pm25_percentage, unhealthy_pm25_percentage, very_unhealthy_pm25_percentage, hazardous_pm25_percentage,
daily_avg_pm25, daily_max_pm25, daily_min_pm25,
good_pm10_percentage, moderate_pm10_percentage, unhealthy_sensitive_pm10_percentage, unhealthy_pm10_percentage, very_unhealthy_pm10_percentage, hazardous_pm10_percentage,
daily_avg_pm10, daily_max_pm10, daily_min_pm10,
good_voc_percentage, moderate_voc_percentage, unhealthy_sensitive_voc_percentage, unhealthy_voc_percentage, very_unhealthy_voc_percentage, hazardous_voc_percentage,
daily_avg_voc, daily_max_voc, daily_min_voc,
good_co2_percentage, moderate_co2_percentage, unhealthy_sensitive_co2_percentage, unhealthy_co2_percentage, very_unhealthy_co2_percentage, hazardous_co2_percentage,
daily_avg_co2, daily_max_co2, daily_min_co2,
good_ch2o_percentage, moderate_ch2o_percentage, unhealthy_sensitive_ch2o_percentage, unhealthy_ch2o_percentage, very_unhealthy_ch2o_percentage, hazardous_ch2o_percentage,
daily_avg_ch2o, daily_max_ch2o, daily_min_ch2o
)
SELECT
space_id,
event_date,
good_aqi_percentage, moderate_aqi_percentage, unhealthy_sensitive_aqi_percentage, unhealthy_aqi_percentage, very_unhealthy_aqi_percentage, hazardous_aqi_percentage,
daily_avg_aqi, daily_max_aqi, daily_min_aqi,
good_pm25_percentage, moderate_pm25_percentage, unhealthy_sensitive_pm25_percentage, unhealthy_pm25_percentage, very_unhealthy_pm25_percentage, hazardous_pm25_percentage,
daily_avg_pm25, daily_max_pm25, daily_min_pm25,
good_pm10_percentage, moderate_pm10_percentage, unhealthy_sensitive_pm10_percentage, unhealthy_pm10_percentage, very_unhealthy_pm10_percentage, hazardous_pm10_percentage,
daily_avg_pm10, daily_max_pm10, daily_min_pm10,
good_voc_percentage, moderate_voc_percentage, unhealthy_sensitive_voc_percentage, unhealthy_voc_percentage, very_unhealthy_voc_percentage, hazardous_voc_percentage,
daily_avg_voc, daily_max_voc, daily_min_voc,
good_co2_percentage, moderate_co2_percentage, unhealthy_sensitive_co2_percentage, unhealthy_co2_percentage, very_unhealthy_co2_percentage, hazardous_co2_percentage,
daily_avg_co2, daily_max_co2, daily_min_co2,
good_ch2o_percentage, moderate_ch2o_percentage, unhealthy_sensitive_ch2o_percentage, unhealthy_ch2o_percentage, very_unhealthy_ch2o_percentage, hazardous_ch2o_percentage,
daily_avg_ch2o, daily_max_ch2o, daily_min_ch2o
FROM final_data
ON CONFLICT (space_uuid, event_date) DO UPDATE
SET
good_aqi_percentage = EXCLUDED.good_aqi_percentage,
moderate_aqi_percentage = EXCLUDED.moderate_aqi_percentage,
unhealthy_sensitive_aqi_percentage = EXCLUDED.unhealthy_sensitive_aqi_percentage,
unhealthy_aqi_percentage = EXCLUDED.unhealthy_aqi_percentage,
very_unhealthy_aqi_percentage = EXCLUDED.very_unhealthy_aqi_percentage,
hazardous_aqi_percentage = EXCLUDED.hazardous_aqi_percentage,
daily_avg_aqi = EXCLUDED.daily_avg_aqi,
daily_max_aqi = EXCLUDED.daily_max_aqi,
daily_min_aqi = EXCLUDED.daily_min_aqi,
good_pm25_percentage = EXCLUDED.good_pm25_percentage,
moderate_pm25_percentage = EXCLUDED.moderate_pm25_percentage,
unhealthy_sensitive_pm25_percentage = EXCLUDED.unhealthy_sensitive_pm25_percentage,
unhealthy_pm25_percentage = EXCLUDED.unhealthy_pm25_percentage,
very_unhealthy_pm25_percentage = EXCLUDED.very_unhealthy_pm25_percentage,
hazardous_pm25_percentage = EXCLUDED.hazardous_pm25_percentage,
daily_avg_pm25 = EXCLUDED.daily_avg_pm25,
daily_max_pm25 = EXCLUDED.daily_max_pm25,
daily_min_pm25 = EXCLUDED.daily_min_pm25,
good_pm10_percentage = EXCLUDED.good_pm10_percentage,
moderate_pm10_percentage = EXCLUDED.moderate_pm10_percentage,
unhealthy_sensitive_pm10_percentage = EXCLUDED.unhealthy_sensitive_pm10_percentage,
unhealthy_pm10_percentage = EXCLUDED.unhealthy_pm10_percentage,
very_unhealthy_pm10_percentage = EXCLUDED.very_unhealthy_pm10_percentage,
hazardous_pm10_percentage = EXCLUDED.hazardous_pm10_percentage,
daily_avg_pm10 = EXCLUDED.daily_avg_pm10,
daily_max_pm10 = EXCLUDED.daily_max_pm10,
daily_min_pm10 = EXCLUDED.daily_min_pm10,
good_voc_percentage = EXCLUDED.good_voc_percentage,
moderate_voc_percentage = EXCLUDED.moderate_voc_percentage,
unhealthy_sensitive_voc_percentage = EXCLUDED.unhealthy_sensitive_voc_percentage,
unhealthy_voc_percentage = EXCLUDED.unhealthy_voc_percentage,
very_unhealthy_voc_percentage = EXCLUDED.very_unhealthy_voc_percentage,
hazardous_voc_percentage = EXCLUDED.hazardous_voc_percentage,
daily_avg_voc = EXCLUDED.daily_avg_voc,
daily_max_voc = EXCLUDED.daily_max_voc,
daily_min_voc = EXCLUDED.daily_min_voc,
good_co2_percentage = EXCLUDED.good_co2_percentage,
moderate_co2_percentage = EXCLUDED.moderate_co2_percentage,
unhealthy_sensitive_co2_percentage = EXCLUDED.unhealthy_sensitive_co2_percentage,
unhealthy_co2_percentage = EXCLUDED.unhealthy_co2_percentage,
very_unhealthy_co2_percentage = EXCLUDED.very_unhealthy_co2_percentage,
hazardous_co2_percentage = EXCLUDED.hazardous_co2_percentage,
daily_avg_co2 = EXCLUDED.daily_avg_co2,
daily_max_co2 = EXCLUDED.daily_max_co2,
daily_min_co2 = EXCLUDED.daily_min_co2,
good_ch2o_percentage = EXCLUDED.good_ch2o_percentage,
moderate_ch2o_percentage = EXCLUDED.moderate_ch2o_percentage,
unhealthy_sensitive_ch2o_percentage = EXCLUDED.unhealthy_sensitive_ch2o_percentage,
unhealthy_ch2o_percentage = EXCLUDED.unhealthy_ch2o_percentage,
very_unhealthy_ch2o_percentage = EXCLUDED.very_unhealthy_ch2o_percentage,
hazardous_ch2o_percentage = EXCLUDED.hazardous_ch2o_percentage,
daily_avg_ch2o = EXCLUDED.daily_avg_ch2o,
daily_max_ch2o = EXCLUDED.daily_max_ch2o,
daily_min_ch2o = EXCLUDED.daily_min_ch2o;
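
The filled_pollutants CTE in the file above carries a missing hour forward with COALESCE(value, LAG(value) OVER (PARTITION BY space_id ORDER BY event_hour)). A small standalone sketch with hypothetical pm25 readings shows what that fills, and what it does not:

-- Minimal sketch of the COALESCE + LAG gap-fill used in filled_pollutants
-- (readings and timestamps are illustrative only).
WITH hourly AS (
    SELECT * FROM (VALUES
        (1, TIMESTAMP '2025-05-27 08:00', 12.0),
        (1, TIMESTAMP '2025-05-27 09:00', NULL),   -- no pm25 sample in this hour
        (1, TIMESTAMP '2025-05-27 10:00', 15.0)
    ) AS t(space_id, event_hour, pm25_avg)
)
SELECT
    space_id,
    event_hour,
    COALESCE(pm25_avg,
             LAG(pm25_avg) OVER (PARTITION BY space_id ORDER BY event_hour)) AS pm25_avg_f
FROM hourly;
-- 09:00 is filled with 12.0; a second consecutive empty hour would stay NULL,
-- because LAG only looks one row back.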

View File

@ -1,94 +1,100 @@
WITH presence_logs AS (
SELECT
d.space_device_uuid AS space_id,
l.device_id,
l.event_time,
l.value,
LAG(l.event_time) OVER (PARTITION BY l.device_id ORDER BY l.event_time) AS prev_time,
LAG(l.value) OVER (PARTITION BY l.device_id ORDER BY l.event_time) AS prev_value
FROM device d
JOIN "device-status-log" l ON d.uuid = l.device_id
JOIN product p ON p.uuid = d.product_device_uuid
WHERE l.code = 'presence_state'
AND p.cat_name = 'hps'
-- Step 1: Get device presence events with previous timestamps
WITH start_date AS (
SELECT
d.uuid AS device_id,
d.space_device_uuid AS space_id,
l.value,
l.event_time::timestamp AS event_time,
LAG(l.event_time::timestamp) OVER (PARTITION BY d.uuid ORDER BY l.event_time) AS prev_timestamp
FROM device d
LEFT JOIN "device-status-log" l
ON d.uuid = l.device_id
LEFT JOIN product p
ON p.uuid = d.product_device_uuid
WHERE p.cat_name = 'hps'
AND l.code = 'presence_state'
),
-- Intervals when device was in 'presence' (between prev_time and event_time when value='none')
presence_intervals AS (
SELECT
space_id,
prev_time AS start_time,
event_time AS end_time
FROM presence_logs
WHERE value = 'none'
AND prev_value = 'presence'
AND prev_time IS NOT NULL
-- Step 2: Identify periods when device reports "none"
device_none_periods AS (
SELECT
space_id,
device_id,
event_time AS empty_from,
LEAD(event_time) OVER (PARTITION BY device_id ORDER BY event_time) AS empty_until
FROM start_date
WHERE value = 'none'
),
-- Split intervals across days
split_intervals AS (
SELECT
space_id,
generate_series(
date_trunc('day', start_time),
date_trunc('day', end_time),
interval '1 day'
)::date AS event_date,
GREATEST(start_time, date_trunc('day', start_time)) AS interval_start,
LEAST(end_time, date_trunc('day', end_time) + interval '1 day') AS interval_end
FROM presence_intervals
-- Step 3: Clip the "none" periods to the edges of each day
clipped_device_none_periods AS (
SELECT
space_id,
GREATEST(empty_from, DATE_TRUNC('day', empty_from)) AS clipped_from,
LEAST(empty_until, DATE_TRUNC('day', empty_until) + INTERVAL '1 day') AS clipped_until
FROM device_none_periods
WHERE empty_until IS NOT NULL
),
-- Mark and group overlapping intervals per space per day
ordered_intervals AS (
SELECT
space_id,
event_date,
interval_start,
interval_end,
LAG(interval_end) OVER (PARTITION BY space_id, event_date ORDER BY interval_start) AS prev_end
FROM split_intervals
-- Step 4: Break multi-day periods into daily intervals
generated_daily_intervals AS (
SELECT
space_id,
gs::date AS day,
GREATEST(clipped_from, gs) AS interval_start,
LEAST(clipped_until, gs + INTERVAL '1 day') AS interval_end
FROM clipped_device_none_periods,
LATERAL generate_series(DATE_TRUNC('day', clipped_from), DATE_TRUNC('day', clipped_until), INTERVAL '1 day') AS gs
),
grouped_intervals AS (
SELECT *,
SUM(CASE
WHEN prev_end IS NULL OR interval_start > prev_end THEN 1
ELSE 0
END) OVER (PARTITION BY space_id, event_date ORDER BY interval_start) AS grp
FROM ordered_intervals
),
-- Merge overlapping intervals per group
-- Step 5: Merge overlapping or adjacent intervals per day
merged_intervals AS (
SELECT
space_id,
event_date,
MIN(interval_start) AS merged_start,
MAX(interval_end) AS merged_end
FROM grouped_intervals
GROUP BY space_id, event_date, grp
SELECT
space_id,
day,
interval_start,
interval_end
FROM (
SELECT
space_id,
day,
interval_start,
interval_end,
LAG(interval_end) OVER (PARTITION BY space_id, day ORDER BY interval_start) AS prev_end
FROM generated_daily_intervals
) sub
WHERE prev_end IS NULL OR interval_start > prev_end
),
-- Sum durations of merged intervals
summed_intervals AS (
SELECT
space_id,
event_date,
SUM(EXTRACT(EPOCH FROM (merged_end - merged_start))) AS raw_occupied_seconds
FROM merged_intervals
GROUP BY space_id, event_date
),
-- Step 6: Sum up total missing seconds (device reported "none") per day
missing_seconds_per_day AS (
SELECT
space_id,
day AS missing_date,
SUM(EXTRACT(EPOCH FROM (interval_end - interval_start))) AS total_missing_seconds
FROM merged_intervals
GROUP BY space_id, day
),
final_data AS (
SELECT
space_id,
event_date,
LEAST(raw_occupied_seconds, 86400) AS occupied_seconds,
ROUND(LEAST(raw_occupied_seconds, 86400) / 86400.0 * 100, 2) AS occupancy_percentage
FROM summed_intervals
ORDER BY space_id, event_date)
-- Step 7: Calculate total occupied time per day (86400 - missing)
occupied_seconds_per_day AS (
SELECT
space_id,
missing_date as event_date,
86400 - total_missing_seconds AS total_occupied_seconds,
(86400 - total_missing_seconds)/86400*100 as occupancy_prct
FROM missing_seconds_per_day
)
-- Final Output
, final_data as (
SELECT space_id,
event_date,
total_occupied_seconds,
occupancy_prct
FROM occupied_seconds_per_day
ORDER BY 1,2
)
INSERT INTO public."space-daily-occupancy-duration" (
space_uuid,
@ -98,13 +104,12 @@ INSERT INTO public."space-daily-occupancy-duration" (
)
select space_id,
event_date,
occupied_seconds,
occupancy_percentage
total_occupied_seconds,
occupancy_prct
FROM final_data
ON CONFLICT (space_uuid, event_date) DO UPDATE
SET
occupancy_percentage = EXCLUDED.occupancy_percentage,
occupied_seconds = EXCLUDED.occupied_seconds;
occupancy_percentage = EXCLUDED.occupancy_percentage;
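
Step 7 above derives occupancy as 86400 seconds minus the merged "none" time for the day. A quick arithmetic check with illustrative numbers (non-overlapping gaps, so the merge step is a pass-through):

-- A space whose devices report 'none' for 00:00-06:00 and 18:00-24:00 is missing
-- 6*3600 + 6*3600 = 43200 seconds of presence that day.
SELECT
    86400 - 43200                              AS total_occupied_seconds,  -- 43200
    ROUND((86400 - 43200) / 86400.0 * 100, 2)  AS occupancy_prct;          -- 50.00
-- The numeric divisor 86400.0 is only needed in this standalone sketch; in the
-- query above, total_missing_seconds already comes from EXTRACT(EPOCH ...), which
-- is not an integer type.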

View File

@ -1,107 +1,117 @@
WITH params AS (
SELECT
TO_DATE(NULLIF($1, ''), 'YYYY-MM-DD') AS event_date
TO_DATE(NULLIF($1, ''), 'YYYY-MM-DD') AS event_date,
$2::uuid AS space_id
)
, start_date AS (
SELECT
d.uuid AS device_id,
d.space_device_uuid AS space_id,
l.value,
l.event_time::timestamp AS event_time,
LAG(l.event_time::timestamp) OVER (PARTITION BY d.uuid ORDER BY l.event_time) AS prev_timestamp
FROM device d
LEFT JOIN "device-status-log" l
ON d.uuid = l.device_id
LEFT JOIN product p
ON p.uuid = d.product_device_uuid
WHERE p.cat_name = 'hps'
AND l.code = 'presence_state'
),
presence_logs AS (
SELECT
d.space_device_uuid AS space_id,
l.device_id,
l.event_time,
l.value,
LAG(l.event_time) OVER (PARTITION BY l.device_id ORDER BY l.event_time) AS prev_time
FROM device d
JOIN "device-status-log" l ON d.uuid = l.device_id
JOIN product p ON p.uuid = d.product_device_uuid
WHERE l.code = 'presence_state'
AND p.cat_name = 'hps'
-- Step 2: Identify periods when device reports "none"
device_none_periods AS (
SELECT
space_id,
device_id,
event_time AS empty_from,
LEAD(event_time) OVER (PARTITION BY device_id ORDER BY event_time) AS empty_until
FROM start_date
WHERE value = 'none'
),
presence_intervals AS (
SELECT
space_id,
prev_time AS start_time,
event_time AS end_time
FROM presence_logs
WHERE value = 'none' AND prev_time IS NOT NULL
-- Step 3: Clip the "none" periods to the edges of each day
clipped_device_none_periods AS (
SELECT
space_id,
GREATEST(empty_from, DATE_TRUNC('day', empty_from)) AS clipped_from,
LEAST(empty_until, DATE_TRUNC('day', empty_until) + INTERVAL '1 day') AS clipped_until
FROM device_none_periods
WHERE empty_until IS NOT NULL
),
split_intervals AS (
SELECT
space_id,
generate_series(
date_trunc('day', start_time),
date_trunc('day', end_time),
interval '1 day'
)::date AS event_date,
GREATEST(start_time, date_trunc('day', start_time)) AS interval_start,
LEAST(end_time, date_trunc('day', end_time) + INTERVAL '1 day') AS interval_end
FROM presence_intervals
),
ordered_intervals AS (
SELECT
space_id,
event_date,
interval_start,
interval_end,
LAG(interval_end) OVER (PARTITION BY space_id, event_date ORDER BY interval_start) AS prev_end
FROM split_intervals
),
grouped_intervals AS (
SELECT *,
SUM(CASE
WHEN prev_end IS NULL OR interval_start > prev_end THEN 1
ELSE 0
END) OVER (PARTITION BY space_id, event_date ORDER BY interval_start) AS grp
FROM ordered_intervals
-- Step 4: Break multi-day periods into daily intervals
generated_daily_intervals AS (
SELECT
space_id,
gs::date AS day,
GREATEST(clipped_from, gs) AS interval_start,
LEAST(clipped_until, gs + INTERVAL '1 day') AS interval_end
FROM clipped_device_none_periods,
LATERAL generate_series(DATE_TRUNC('day', clipped_from), DATE_TRUNC('day', clipped_until), INTERVAL '1 day') AS gs
),
-- Step 5: Merge overlapping or adjacent intervals per day
merged_intervals AS (
SELECT
space_id,
event_date,
MIN(interval_start) AS merged_start,
MAX(interval_end) AS merged_end
FROM grouped_intervals
GROUP BY space_id, event_date, grp
SELECT
space_id,
day,
interval_start,
interval_end
FROM (
SELECT
space_id,
day,
interval_start,
interval_end,
LAG(interval_end) OVER (PARTITION BY space_id, day ORDER BY interval_start) AS prev_end
FROM generated_daily_intervals
) sub
WHERE prev_end IS NULL OR interval_start > prev_end
),
summed_intervals AS (
SELECT
space_id,
event_date,
SUM(EXTRACT(EPOCH FROM (merged_end - merged_start))) AS raw_occupied_seconds
FROM merged_intervals
GROUP BY space_id, event_date
-- Step 6: Sum up total missing seconds (device reported "none") per day
missing_seconds_per_day AS (
SELECT
space_id,
day AS missing_date,
SUM(EXTRACT(EPOCH FROM (interval_end - interval_start))) AS total_missing_seconds
FROM merged_intervals
GROUP BY space_id, day
),
final_data AS (
SELECT
s.space_id,
s.event_date,
LEAST(raw_occupied_seconds, 86400) AS occupied_seconds,
ROUND(LEAST(raw_occupied_seconds, 86400) / 86400.0 * 100, 2) AS occupancy_percentage
FROM summed_intervals s
JOIN params p
ON p.event_date = s.event_date
-- Step 7: Calculate total occupied time per day (86400 - missing)
occupied_seconds_per_day AS (
SELECT
space_id,
missing_date as event_date,
86400 - total_missing_seconds AS total_occupied_seconds,
(86400 - total_missing_seconds)/86400*100 as occupancy_percentage
FROM missing_seconds_per_day
)
-- Final Output
, final_data as (
SELECT occupied_seconds_per_day.space_id,
occupied_seconds_per_day.event_date,
occupied_seconds_per_day.occupancy_percentage
FROM occupied_seconds_per_day
join params p on true
and p.space_id = occupied_seconds_per_day.space_id
and p.event_date = occupied_seconds_per_day.event_date
ORDER BY 1,2
)
INSERT INTO public."space-daily-occupancy-duration" (
space_uuid,
event_date,
occupied_seconds,
occupancy_percentage
)
SELECT
space_id,
event_date,
occupied_seconds,
occupancy_percentage
select space_id,
event_date,
occupancy_percentage
FROM final_data
ON CONFLICT (space_uuid, event_date) DO UPDATE
SET
occupancy_percentage = EXCLUDED.occupancy_percentage,
occupied_seconds = EXCLUDED.occupied_seconds;
occupancy_percentage = EXCLUDED.occupancy_percentage;
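
This variant takes two bind parameters: $1, an optional 'YYYY-MM-DD' string, and $2, the space uuid. A sketch of how the params CTE resolves once hypothetical literals are substituted for the placeholders:

-- Hypothetical literal values in place of $1 and $2:
WITH params AS (
    SELECT
        TO_DATE(NULLIF('2025-05-27', ''), 'YYYY-MM-DD') AS event_date,  -- $1
        'aaaaaaaa-bbbb-cccc-dddd-eeeeeeeeeeee'::uuid    AS space_id     -- $2
)
SELECT * FROM params;
-- An empty string for $1 resolves to NULL via NULLIF; since the final join here
-- requires p.event_date = occupied_seconds_per_day.event_date, a NULL date would
-- match no rows, so a concrete date appears to be expected for this query.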

View File

@ -1,6 +1,7 @@
WITH params AS (
SELECT
$1::date AS target_date
$1::uuid AS device_id,
$2::date AS target_date
),
total_energy AS (
SELECT
@ -13,7 +14,8 @@ total_energy AS (
MAX(log.value)::integer AS max_value
FROM "device-status-log" log, params
WHERE log.code = 'EnergyConsumed'
AND log.event_time::date = params.target_date
AND log.device_id = params.device_id
AND log.event_time::date = params.target_date
GROUP BY 1,2,3,4,5
),
energy_phase_A AS (
@ -27,7 +29,8 @@ energy_phase_A AS (
MAX(log.value)::integer AS max_value
FROM "device-status-log" log, params
WHERE log.code = 'EnergyConsumedA'
AND log.event_time::date = params.target_date
AND log.device_id = params.device_id
AND log.event_time::date = params.target_date
GROUP BY 1,2,3,4,5
),
energy_phase_B AS (
@ -41,7 +44,8 @@ energy_phase_B AS (
MAX(log.value)::integer AS max_value
FROM "device-status-log" log, params
WHERE log.code = 'EnergyConsumedB'
AND log.event_time::date = params.target_date
AND log.device_id = params.device_id
AND log.event_time::date = params.target_date
GROUP BY 1,2,3,4,5
),
energy_phase_C AS (
@ -55,7 +59,8 @@ energy_phase_C AS (
MAX(log.value)::integer AS max_value
FROM "device-status-log" log, params
WHERE log.code = 'EnergyConsumedC'
AND log.event_time::date = params.target_date
AND log.device_id = params.device_id
AND log.event_time::date = params.target_date
GROUP BY 1,2,3,4,5
),
final_data AS (

View File

@ -1,6 +1,8 @@
WITH params AS (
SELECT
$1::date AS target_date
$1::uuid AS device_id,
$2::date AS target_date,
$3::text AS target_hour
),
total_energy AS (
SELECT
@ -13,7 +15,9 @@ total_energy AS (
MAX(log.value)::integer AS max_value
FROM "device-status-log" log, params
WHERE log.code = 'EnergyConsumed'
AND log.device_id = params.device_id
AND log.event_time::date = params.target_date
AND EXTRACT(HOUR FROM log.event_time)::text = params.target_hour
GROUP BY 1,2,3,4,5
),
energy_phase_A AS (
@ -27,7 +31,9 @@ energy_phase_A AS (
MAX(log.value)::integer AS max_value
FROM "device-status-log" log, params
WHERE log.code = 'EnergyConsumedA'
AND log.device_id = params.device_id
AND log.event_time::date = params.target_date
AND EXTRACT(HOUR FROM log.event_time)::text = params.target_hour
GROUP BY 1,2,3,4,5
),
energy_phase_B AS (
@ -41,7 +47,9 @@ energy_phase_B AS (
MAX(log.value)::integer AS max_value
FROM "device-status-log" log, params
WHERE log.code = 'EnergyConsumedB'
AND log.device_id = params.device_id
AND log.event_time::date = params.target_date
AND EXTRACT(HOUR FROM log.event_time)::text = params.target_hour
GROUP BY 1,2,3,4,5
),
energy_phase_C AS (
@ -55,7 +63,9 @@ energy_phase_C AS (
MAX(log.value)::integer AS max_value
FROM "device-status-log" log, params
WHERE log.code = 'EnergyConsumedC'
AND log.device_id = params.device_id
AND log.event_time::date = params.target_date
AND EXTRACT(HOUR FROM log.event_time)::text = params.target_hour
GROUP BY 1,2,3,4,5
),
final_data AS (

View File

@ -1,6 +1,7 @@
WITH params AS (
SELECT
$1::text AS target_month -- Format should match 'MM-YYYY'
$1::uuid AS device_id,
$2::text AS target_month -- Format should match 'MM-YYYY'
),
total_energy AS (
SELECT
@ -13,6 +14,7 @@ total_energy AS (
MAX(log.value)::integer AS max_value
FROM "device-status-log" log, params
WHERE log.code = 'EnergyConsumed'
AND log.device_id = params.device_id
AND TO_CHAR(log.event_time, 'MM-YYYY') = params.target_month
GROUP BY 1,2,3,4,5
),
@ -27,6 +29,7 @@ energy_phase_A AS (
MAX(log.value)::integer AS max_value
FROM "device-status-log" log, params
WHERE log.code = 'EnergyConsumedA'
AND log.device_id = params.device_id
AND TO_CHAR(log.event_time, 'MM-YYYY') = params.target_month
GROUP BY 1,2,3,4,5
),
@ -41,6 +44,7 @@ energy_phase_B AS (
MAX(log.value)::integer AS max_value
FROM "device-status-log" log, params
WHERE log.code = 'EnergyConsumedB'
AND log.device_id = params.device_id
AND TO_CHAR(log.event_time, 'MM-YYYY') = params.target_month
GROUP BY 1,2,3,4,5
),
@ -55,6 +59,7 @@ energy_phase_C AS (
MAX(log.value)::integer AS max_value
FROM "device-status-log" log, params
WHERE log.code = 'EnergyConsumedC'
AND log.device_id = params.device_id
AND TO_CHAR(log.event_time, 'MM-YYYY') = params.target_month
GROUP BY 1,2,3,4,5
),
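
Across the three energy queries the period filters differ: the daily file compares event_time::date, the hourly file compares EXTRACT(HOUR FROM event_time)::text against target_hour, and this monthly file compares TO_CHAR(event_time, 'MM-YYYY') against target_month. A one-row sketch of what each expression yields for a hypothetical timestamp:

SELECT
    TIMESTAMP '2025-05-27 09:15:00'::date                     AS day_key,    -- 2025-05-27
    EXTRACT(HOUR FROM TIMESTAMP '2025-05-27 09:15:00')::text  AS hour_key,   -- '9'
    TO_CHAR(TIMESTAMP '2025-05-27 09:15:00', 'MM-YYYY')       AS month_key;  -- '05-2025'
-- As written, target_hour would need to be passed as '9' (not '09') to match the
-- hourly query, and target_month as '05-2025' to match this monthly one.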

View File

@ -16,5 +16,4 @@ WITH params AS (
WHERE A.device_uuid::text = ANY(P.device_ids)
AND (P.month IS NULL
OR date_trunc('month', A.event_date) = P.month
);
)

View File

@ -1,6 +1,7 @@
WITH params AS (
SELECT
TO_DATE(NULLIF($1, ''), 'YYYY-MM-DD') AS event_date
TO_DATE(NULLIF($1, ''), 'YYYY-MM-DD') AS event_date,
$2::uuid AS space_id
),
device_logs AS (
@ -86,7 +87,8 @@ SELECT summary.space_id,
count_total_presence_detected
FROM summary
JOIN params P ON true
where (P.event_date IS NULL or summary.event_date::date = P.event_date)
where summary.space_id = P.space_id
and (P.event_date IS NULL or summary.event_date::date = P.event_date)
ORDER BY space_id, event_date)

View File

@ -26,337 +26,119 @@ BEGIN
('pm10', 255, 354, 151, 200),
-- VOC
('voc', 0, 200, 0, 50),
('voc', 201, 400, 51, 100),
('voc', 401, 600, 101, 150),
('voc', 601, 1000, 151, 200),
('voc_value', 0, 200, 0, 50),
('voc_value', 201, 400, 51, 100),
('voc_value', 401, 600, 101, 150),
('voc_value', 601, 1000, 151, 200),
-- CH2O
('ch2o', 0, 2, 0, 50),
('ch2o', 2.1, 4, 51, 100),
('ch2o', 4.1, 6, 101, 150),
('ch2o_value', 0, 2, 0, 50),
('ch2o_value', 2.1, 4, 51, 100),
('ch2o_value', 4.1, 6, 101, 150),
-- CO2
('co2', 350, 1000, 0, 50),
('co2', 1001, 1250, 51, 100),
('co2', 1251, 1500, 101, 150),
('co2', 1501, 2000, 151, 200)
('co2_value', 350, 1000, 0, 50),
('co2_value', 1001, 1250, 51, 100),
('co2_value', 1251, 1500, 101, 150),
('co2_value', 1501, 2000, 151, 200)
) AS v(pollutant, c_low, c_high, i_low, i_high)
WHERE v.pollutant = LOWER(p_pollutant)
AND concentration BETWEEN v.c_low AND v.c_high
LIMIT 1;
-- Linear interpolation
RETURN ROUND(((i_high - i_low) * (concentration - c_low) / (c_high - c_low)) + i_low);
END;
$$ LANGUAGE plpgsql;
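
A worked example of the interpolation above, using the co2_value band 1001-1250 -> 51-100 from the VALUES list (the ::numeric cast is only needed in this standalone check; inside the function the operands are presumably already NUMERIC):

-- A CO2 reading of 1100 falls in the 1001-1250 band, which maps to 51-100:
-- ROUND((100 - 51) * (1100 - 1001) / (1250 - 1001) + 51) = 70.
SELECT ROUND((100 - 51) * (1100 - 1001)::numeric / (1250 - 1001) + 51) AS co2_sub_index;  -- 70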
-- Function to classify AQI
CREATE OR REPLACE FUNCTION classify_aqi(aqi NUMERIC)
RETURNS TEXT AS $$
BEGIN
RETURN CASE
WHEN aqi BETWEEN 0 AND 50 THEN 'Good'
WHEN aqi BETWEEN 51 AND 100 THEN 'Moderate'
WHEN aqi BETWEEN 101 AND 150 THEN 'Unhealthy for Sensitive Groups'
WHEN aqi BETWEEN 151 AND 200 THEN 'Unhealthy'
WHEN aqi BETWEEN 201 AND 300 THEN 'Very Unhealthy'
WHEN aqi >= 301 THEN 'Hazardous'
ELSE NULL
END;
END;
$$ LANGUAGE plpgsql;
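
A quick check of the classification bands, assuming the two functions above have been created:

SELECT classify_aqi(42)  AS a,   -- 'Good'
       classify_aqi(70)  AS b,   -- 'Moderate'
       classify_aqi(180) AS c;   -- 'Unhealthy'
-- The bands have integer edges, so a non-integer value such as 50.5 falls through
-- to NULL; calculate_aqi always ROUNDs its result, so that case does not arise in
-- the pipeline below.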
-- Function to convert AQI level string to number
CREATE OR REPLACE FUNCTION level_to_numeric(level_text TEXT)
RETURNS NUMERIC AS $$
BEGIN
RETURN CAST(regexp_replace(level_text, '[^0-9]', '', 'g') AS NUMERIC);
EXCEPTION WHEN others THEN
RETURN NULL;
END;
$$ LANGUAGE plpgsql;
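
And the string-to-number helper in isolation (input values are illustrative):

SELECT level_to_numeric('level_3')  AS a,   -- 3
       level_to_numeric('level_10') AS b,   -- 10
       level_to_numeric('none')     AS c;   -- NULL (no digits, so the cast fails and NULL is returned)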
-- Query Pipeline Starts Here
-- CTE for device + status log + space
WITH device_space AS (
SELECT
device.uuid AS device_id,
device.created_at,
device.space_device_uuid AS space_id,
"device-status-log".event_time::timestamp AS event_time,
"device-status-log".event_id,
"device-status-log".event_time::date,
"device-status-log".code,
"device-status-log".value
"device-status-log".value,
"device-status-log".log
FROM device
LEFT JOIN "device-status-log"
ON device.uuid = "device-status-log".device_id
LEFT JOIN product
ON product.uuid = device.product_device_uuid
WHERE product.cat_name = 'hjjcy'
WHERE product.cat_name = 'hjjcy'
),
average_pollutants AS (
-- Aggregate air sensor data per device per day
air_data AS (
SELECT
event_time::date AS event_date,
date_trunc('hour', event_time) AS event_hour,
event_time AS date,
device_id,
space_id,
-- PM1
MIN(CASE WHEN code = 'pm1' THEN value::numeric END) AS pm1_min,
AVG(CASE WHEN code = 'pm1' THEN value::numeric END) AS pm1_avg,
MAX(CASE WHEN code = 'pm1' THEN value::numeric END) AS pm1_max,
-- PM25
MIN(CASE WHEN code = 'pm25_value' THEN value::numeric END) AS pm25_min,
AVG(CASE WHEN code = 'pm25_value' THEN value::numeric END) AS pm25_avg,
MAX(CASE WHEN code = 'pm25_value' THEN value::numeric END) AS pm25_max,
-- PM10
MIN(CASE WHEN code = 'pm10' THEN value::numeric END) AS pm10_min,
AVG(CASE WHEN code = 'pm10' THEN value::numeric END) AS pm10_avg,
MAX(CASE WHEN code = 'pm10' THEN value::numeric END) AS pm10_max,
-- VOC
MIN(CASE WHEN code = 'voc_value' THEN value::numeric END) AS voc_min,
AVG(CASE WHEN code = 'voc_value' THEN value::numeric END) AS voc_avg,
MAX(CASE WHEN code = 'voc_value' THEN value::numeric END) AS voc_max,
AVG(CASE WHEN code = 'voc_value' THEN value::numeric END) AS voc_avg,
-- PM1
MIN(CASE WHEN code = 'pm1' THEN value::numeric END) AS pm1_min,
MAX(CASE WHEN code = 'pm1' THEN value::numeric END) AS pm1_max,
AVG(CASE WHEN code = 'pm1' THEN value::numeric END) AS pm1_avg,
-- PM2.5
MIN(CASE WHEN code = 'pm25_value' THEN value::numeric END) AS pm25_min,
MAX(CASE WHEN code = 'pm25_value' THEN value::numeric END) AS pm25_max,
AVG(CASE WHEN code = 'pm25_value' THEN value::numeric END) AS pm25_avg,
-- PM10
MIN(CASE WHEN code = 'pm10' THEN value::numeric END) AS pm10_min,
MAX(CASE WHEN code = 'pm10' THEN value::numeric END) AS pm10_max,
AVG(CASE WHEN code = 'pm10' THEN value::numeric END) AS pm10_avg,
-- CH2O
MIN(CASE WHEN code = 'ch2o_value' THEN value::numeric END) AS ch2o_min,
AVG(CASE WHEN code = 'ch2o_value' THEN value::numeric END) AS ch2o_avg,
MAX(CASE WHEN code = 'ch2o_value' THEN value::numeric END) AS ch2o_max,
AVG(CASE WHEN code = 'ch2o_value' THEN value::numeric END) AS ch2o_avg,
-- Humidity
MIN(CASE WHEN code = 'humidity_value' THEN value::numeric END) AS humidity_low,
MAX(CASE WHEN code = 'humidity_value' THEN value::numeric END) AS humidity_high,
AVG(CASE WHEN code = 'humidity_value' THEN value::numeric END) AS humidity_avg,
-- Temperature
MIN(CASE WHEN code = 'temp_current' THEN value::numeric END) AS temp_low,
MAX(CASE WHEN code = 'temp_current' THEN value::numeric END) AS temp_high,
AVG(CASE WHEN code = 'temp_current' THEN value::numeric END) AS temp_avg,
-- CO2
MIN(CASE WHEN code = 'co2_value' THEN value::numeric END) AS co2_min,
AVG(CASE WHEN code = 'co2_value' THEN value::numeric END) AS co2_avg,
MAX(CASE WHEN code = 'co2_value' THEN value::numeric END) AS co2_max
MAX(CASE WHEN code = 'co2_value' THEN value::numeric END) AS co2_max,
AVG(CASE WHEN code = 'co2_value' THEN value::numeric END) AS co2_avg
FROM device_space
GROUP BY device_id, space_id, event_hour, event_date
),
filled_pollutants AS (
SELECT
*,
-- AVG
COALESCE(pm25_avg, LAG(pm25_avg) OVER (PARTITION BY device_id ORDER BY event_hour)) AS pm25_avg_f,
COALESCE(pm10_avg, LAG(pm10_avg) OVER (PARTITION BY device_id ORDER BY event_hour)) AS pm10_avg_f,
COALESCE(voc_avg, LAG(voc_avg) OVER (PARTITION BY device_id ORDER BY event_hour)) AS voc_avg_f,
COALESCE(co2_avg, LAG(co2_avg) OVER (PARTITION BY device_id ORDER BY event_hour)) AS co2_avg_f,
COALESCE(ch2o_avg, LAG(ch2o_avg) OVER (PARTITION BY device_id ORDER BY event_hour)) AS ch2o_avg_f,
-- MIN
COALESCE(pm25_min, LAG(pm25_min) OVER (PARTITION BY device_id ORDER BY event_hour)) AS pm25_min_f,
COALESCE(pm10_min, LAG(pm10_min) OVER (PARTITION BY device_id ORDER BY event_hour)) AS pm10_min_f,
COALESCE(voc_min, LAG(voc_min) OVER (PARTITION BY device_id ORDER BY event_hour)) AS voc_min_f,
COALESCE(co2_min, LAG(co2_min) OVER (PARTITION BY device_id ORDER BY event_hour)) AS co2_min_f,
COALESCE(ch2o_min, LAG(ch2o_min) OVER (PARTITION BY device_id ORDER BY event_hour)) AS ch2o_min_f,
-- MAX
COALESCE(pm25_max, LAG(pm25_max) OVER (PARTITION BY device_id ORDER BY event_hour)) AS pm25_max_f,
COALESCE(pm10_max, LAG(pm10_max) OVER (PARTITION BY device_id ORDER BY event_hour)) AS pm10_max_f,
COALESCE(voc_max, LAG(voc_max) OVER (PARTITION BY device_id ORDER BY event_hour)) AS voc_max_f,
COALESCE(co2_max, LAG(co2_max) OVER (PARTITION BY device_id ORDER BY event_hour)) AS co2_max_f,
COALESCE(ch2o_max, LAG(ch2o_max) OVER (PARTITION BY device_id ORDER BY event_hour)) AS ch2o_max_f
FROM average_pollutants
),
hourly_results AS (
SELECT
device_id,
space_id,
event_date,
event_hour,
pm1_min, pm1_avg, pm1_max,
pm25_min_f, pm25_avg_f, pm25_max_f,
pm10_min_f, pm10_avg_f, pm10_max_f,
voc_min_f, voc_avg_f, voc_max_f,
co2_min_f, co2_avg_f, co2_max_f,
ch2o_min_f, ch2o_avg_f, ch2o_max_f,
GREATEST(
calculate_aqi('pm25', pm25_min_f),
calculate_aqi('pm10', pm10_min_f)
) AS hourly_min_aqi,
GREATEST(
calculate_aqi('pm25', pm25_avg_f),
calculate_aqi('pm10', pm10_avg_f)
) AS hourly_avg_aqi,
GREATEST(
calculate_aqi('pm25', pm25_max_f),
calculate_aqi('pm10', pm10_max_f)
) AS hourly_max_aqi,
classify_aqi(GREATEST(
calculate_aqi('pm25', pm25_avg_f),
calculate_aqi('pm10', pm10_avg_f)
)) AS aqi_category,
classify_aqi(calculate_aqi('pm25',pm25_avg_f)) as pm25_category,
classify_aqi(calculate_aqi('pm10',pm10_avg_f)) as pm10_category,
classify_aqi(calculate_aqi('voc',voc_avg_f)) as voc_category,
classify_aqi(calculate_aqi('co2',co2_avg_f)) as co2_category,
classify_aqi(calculate_aqi('ch2o',ch2o_avg_f)) as ch2o_category
FROM filled_pollutants
),
daily_category_counts AS (
SELECT device_id, space_id, event_date, aqi_category AS category, 'aqi' AS pollutant, COUNT(*) AS category_count
FROM hourly_results
GROUP BY device_id, space_id, event_date, aqi_category
UNION ALL
SELECT device_id, space_id, event_date, pm25_category AS category, 'pm25' AS pollutant, COUNT(*) AS category_count
FROM hourly_results
GROUP BY device_id, space_id, event_date, pm25_category
UNION ALL
SELECT device_id, space_id, event_date, pm10_category AS category, 'pm10' AS pollutant, COUNT(*) AS category_count
FROM hourly_results
GROUP BY device_id, space_id, event_date, pm10_category
UNION ALL
SELECT device_id, space_id, event_date, voc_category AS category, 'voc' AS pollutant, COUNT(*) AS category_count
FROM hourly_results
GROUP BY device_id, space_id, event_date, voc_category
UNION ALL
SELECT device_id, space_id, event_date, co2_category AS category, 'co2' AS pollutant, COUNT(*) AS category_count
FROM hourly_results
GROUP BY device_id, space_id, event_date, co2_category
UNION ALL
SELECT device_id, space_id, event_date, ch2o_category AS category, 'ch2o' AS pollutant, COUNT(*) AS category_count
FROM hourly_results
GROUP BY device_id, space_id, event_date, ch2o_category
),
daily_totals AS (
SELECT
device_id,
space_id,
event_date,
SUM(category_count) AS total_count
FROM daily_category_counts
where pollutant = 'aqi'
GROUP BY device_id, space_id, event_date
),
-- Pivot Categories into Columns
daily_percentages AS (
select
dt.device_id,
dt.space_id,
dt.event_date,
-- AQI CATEGORIES
ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Good' and dcc.pollutant = 'aqi' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS good_aqi_percentage,
ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Moderate' and dcc.pollutant = 'aqi' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS moderate_aqi_percentage,
ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Unhealthy for Sensitive Groups' and dcc.pollutant = 'aqi' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS unhealthy_sensitive_aqi_percentage,
ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Unhealthy' and dcc.pollutant = 'aqi' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS unhealthy_aqi_percentage,
ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Very Unhealthy' and dcc.pollutant = 'aqi' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS very_unhealthy_aqi_percentage,
ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Hazardous' and dcc.pollutant = 'aqi' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS hazardous_aqi_percentage,
-- PM25 CATEGORIES
ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Good' and dcc.pollutant = 'pm25' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS good_pm25_percentage,
ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Moderate' and dcc.pollutant = 'pm25' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS moderate_pm25_percentage,
ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Unhealthy for Sensitive Groups' and dcc.pollutant = 'pm25' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS unhealthy_sensitive_pm25_percentage,
ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Unhealthy' and dcc.pollutant = 'pm25' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS unhealthy_pm25_percentage,
ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Very Unhealthy' and dcc.pollutant = 'pm25' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS very_unhealthy_pm25_percentage,
ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Hazardous' and dcc.pollutant = 'pm25' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS hazardous_pm25_percentage,
-- PM10 CATEGORIES
ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Good' and dcc.pollutant = 'pm10' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS good_pm10_percentage,
ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Moderate' and dcc.pollutant = 'pm10' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS moderate_pm10_percentage,
ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Unhealthy for Sensitive Groups' and dcc.pollutant = 'pm10' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS unhealthy_sensitive_pm10_percentage,
ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Unhealthy' and dcc.pollutant = 'pm10' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS unhealthy_pm10_percentage,
ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Very Unhealthy' and dcc.pollutant = 'pm10' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS very_unhealthy_pm10_percentage,
ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Hazardous' and dcc.pollutant = 'pm10' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS hazardous_pm10_percentage,
-- VOC CATEGORIES
ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Good' and dcc.pollutant = 'voc' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS good_voc_percentage,
ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Moderate' and dcc.pollutant = 'voc' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS moderate_voc_percentage,
ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Unhealthy for Sensitive Groups' and dcc.pollutant = 'voc' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS unhealthy_sensitive_voc_percentage,
ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Unhealthy' and dcc.pollutant = 'voc' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS unhealthy_voc_percentage,
ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Very Unhealthy' and dcc.pollutant = 'voc' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS very_unhealthy_voc_percentage,
ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Hazardous' and dcc.pollutant = 'voc' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS hazardous_voc_percentage,
-- CO2 CATEGORIES
ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Good' and dcc.pollutant = 'co2' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS good_co2_percentage,
ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Moderate' and dcc.pollutant = 'co2' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS moderate_co2_percentage,
ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Unhealthy for Sensitive Groups' and dcc.pollutant = 'co2' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS unhealthy_sensitive_co2_percentage,
ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Unhealthy' and dcc.pollutant = 'co2' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS unhealthy_co2_percentage,
ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Very Unhealthy' and dcc.pollutant = 'co2' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS very_unhealthy_co2_percentage,
ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Hazardous' and dcc.pollutant = 'co2' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS hazardous_co2_percentage,
-- CH2O CATEGORIES
ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Good' and dcc.pollutant = 'ch2o' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS good_ch2o_percentage,
ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Moderate' and dcc.pollutant = 'ch2o' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS moderate_ch2o_percentage,
ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Unhealthy for Sensitive Groups' and dcc.pollutant = 'ch2o' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS unhealthy_sensitive_ch2o_percentage,
ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Unhealthy' and dcc.pollutant = 'ch2o' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS unhealthy_ch2o_percentage,
ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Very Unhealthy' and dcc.pollutant = 'ch2o' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS very_unhealthy_ch2o_percentage,
ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Hazardous' and dcc.pollutant = 'ch2o' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS hazardous_ch2o_percentage
FROM daily_totals dt
LEFT JOIN daily_category_counts dcc
ON dt.device_id = dcc.device_id AND dt.event_date = dcc.event_date
GROUP BY dt.device_id, dt.space_id, dt.event_date, dt.total_count
),
daily_averages AS (
SELECT
device_id,
space_id,
event_date,
-- AQI
ROUND(AVG(hourly_min_aqi)::numeric, 2) AS daily_min_aqi,
ROUND(AVG(hourly_avg_aqi)::numeric, 2) AS daily_avg_aqi,
ROUND(AVG(hourly_max_aqi)::numeric, 2) AS daily_max_aqi,
-- PM25
ROUND(AVG(pm25_min_f)::numeric, 2) AS daily_min_pm25,
ROUND(AVG(pm25_avg_f)::numeric, 2) AS daily_avg_pm25,
ROUND(AVG(pm25_max_f)::numeric, 2) AS daily_max_pm25,
-- PM10
ROUND(AVG(pm10_min_f)::numeric, 2) AS daily_min_pm10,
ROUND(AVG(pm10_avg_f)::numeric, 2) AS daily_avg_pm10,
ROUND(AVG(pm10_max_f)::numeric, 2) AS daily_max_pm10,
-- VOC
ROUND(AVG(voc_min_f)::numeric, 2) AS daily_min_voc,
ROUND(AVG(voc_avg_f)::numeric, 2) AS daily_avg_voc,
ROUND(AVG(voc_max_f)::numeric, 2) AS daily_max_voc,
-- CO2
ROUND(AVG(co2_min_f)::numeric, 2) AS daily_min_co2,
ROUND(AVG(co2_avg_f)::numeric, 2) AS daily_avg_co2,
ROUND(AVG(co2_max_f)::numeric, 2) AS daily_max_co2,
-- CH2O
ROUND(AVG(ch2o_min_f)::numeric, 2) AS daily_min_ch2o,
ROUND(AVG(ch2o_avg_f)::numeric, 2) AS daily_avg_ch2o,
ROUND(AVG(ch2o_max_f)::numeric, 2) AS daily_max_ch2o
FROM hourly_results
GROUP BY device_id, space_id, event_date
GROUP BY date, device_id, space_id
)
SELECT
p.device_id,
p.space_id,
p.event_date,
p.good_aqi_percentage, p.moderate_aqi_percentage, p.unhealthy_sensitive_aqi_percentage, p.unhealthy_aqi_percentage, p.very_unhealthy_aqi_percentage, p.hazardous_aqi_percentage,
a.daily_avg_aqi,a.daily_max_aqi, a.daily_min_aqi,
p.good_pm25_percentage, p.moderate_pm25_percentage, p.unhealthy_sensitive_pm25_percentage, p.unhealthy_pm25_percentage, p.very_unhealthy_pm25_percentage, p.hazardous_pm25_percentage,
a.daily_avg_pm25,a.daily_max_pm25, a.daily_min_pm25,
p.good_pm10_percentage, p.moderate_pm10_percentage, p.unhealthy_sensitive_pm10_percentage, p.unhealthy_pm10_percentage, p.very_unhealthy_pm10_percentage, p.hazardous_pm10_percentage,
a.daily_avg_pm10, a.daily_max_pm10, a.daily_min_pm10,
p.good_voc_percentage, p.moderate_voc_percentage, p.unhealthy_sensitive_voc_percentage, p.unhealthy_voc_percentage, p.very_unhealthy_voc_percentage, p.hazardous_voc_percentage,
a.daily_avg_voc, a.daily_max_voc, a.daily_min_voc,
p.good_co2_percentage, p.moderate_co2_percentage, p.unhealthy_sensitive_co2_percentage, p.unhealthy_co2_percentage, p.very_unhealthy_co2_percentage, p.hazardous_co2_percentage,
a.daily_avg_co2,a.daily_max_co2, a.daily_min_co2,
p.good_ch2o_percentage, p.moderate_ch2o_percentage, p.unhealthy_sensitive_ch2o_percentage, p.unhealthy_ch2o_percentage, p.very_unhealthy_ch2o_percentage, p.hazardous_ch2o_percentage,
a.daily_avg_ch2o,a.daily_max_ch2o, a.daily_min_ch2o
FROM daily_percentages p
LEFT JOIN daily_averages a
ON p.device_id = a.device_id AND p.event_date = a.event_date
ORDER BY p.space_id, p.event_date;
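
For reference, a minimal TypeScript sketch of the category-percentage pivot computed in daily_percentages above, using hypothetical hourly category counts for one device and day (values are illustrative only):

// Hourly AQI category counts for one device on one day (hypothetical; 24 hours total).
const categoryCounts: Record<string, number> = {
  Good: 14,
  Moderate: 6,
  'Unhealthy for Sensitive Groups': 4,
};
const totalCount = 24; // SUM of the 'aqi' pollutant counts, as in daily_totals

// Equivalent of ROUND(COALESCE(count * 100.0 / total, 0), 2) for each category.
const percentages = Object.fromEntries(
  Object.entries(categoryCounts).map(([category, count]) => [
    category,
    Math.round((count * 100 / totalCount) * 100) / 100,
  ]),
);
// => { Good: 58.33, Moderate: 25, 'Unhealthy for Sensitive Groups': 16.67 }
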
-- Final select with AQI calculation
SELECT
date,
device_id,
space_id,
voc_min, voc_max, voc_avg,
pm1_min, pm1_max, pm1_avg,
pm25_min, pm25_max, pm25_avg,
pm10_min, pm10_max, pm10_avg,
ch2o_min, ch2o_max, ch2o_avg,
humidity_low, humidity_high, humidity_avg,
temp_low, temp_high, temp_avg,
co2_min, co2_max, co2_avg,
GREATEST(
calculate_aqi('pm25', pm25_avg),
calculate_aqi('pm10', pm10_avg),
calculate_aqi('voc_value', voc_avg),
calculate_aqi('co2_value', co2_avg),
calculate_aqi('ch2o_value', ch2o_avg)
) AS overall_AQI
FROM air_data;
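
For reference, a minimal TypeScript sketch of the "overall AQI = worst sub-index" rule that the GREATEST(...) expression above applies; the sub-index values are hypothetical stand-ins for the results of the SQL calculate_aqi calls:

// Hypothetical per-pollutant sub-index values for one averaged row.
const subIndices = {
  pm25: 42, // from calculate_aqi('pm25', pm25_avg)
  pm10: 38, // from calculate_aqi('pm10', pm10_avg)
  voc: 55,  // from calculate_aqi('voc_value', voc_avg)
  co2: 20,  // from calculate_aqi('co2_value', co2_avg)
  ch2o: 15, // from calculate_aqi('ch2o_value', ch2o_avg)
};

// The overall AQI is the largest sub-index, mirroring GREATEST(...) in SQL.
const overallAqi = Math.max(...Object.values(subIndices)); // 55 in this example
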

View File

@ -1,275 +0,0 @@
-- Query Pipeline Starts Here
WITH device_space AS (
SELECT
device.uuid AS device_id,
device.space_device_uuid AS space_id,
"device-status-log".event_time::timestamp AS event_time,
"device-status-log".code,
"device-status-log".value
FROM device
LEFT JOIN "device-status-log"
ON device.uuid = "device-status-log".device_id
LEFT JOIN product
ON product.uuid = device.product_device_uuid
WHERE product.cat_name = 'hjjcy'
),
average_pollutants AS (
SELECT
event_time::date AS event_date,
date_trunc('hour', event_time) AS event_hour,
space_id,
-- PM1
MIN(CASE WHEN code = 'pm1' THEN value::numeric END) AS pm1_min,
AVG(CASE WHEN code = 'pm1' THEN value::numeric END) AS pm1_avg,
MAX(CASE WHEN code = 'pm1' THEN value::numeric END) AS pm1_max,
-- PM25
MIN(CASE WHEN code = 'pm25_value' THEN value::numeric END) AS pm25_min,
AVG(CASE WHEN code = 'pm25_value' THEN value::numeric END) AS pm25_avg,
MAX(CASE WHEN code = 'pm25_value' THEN value::numeric END) AS pm25_max,
-- PM10
MIN(CASE WHEN code = 'pm10' THEN value::numeric END) AS pm10_min,
AVG(CASE WHEN code = 'pm10' THEN value::numeric END) AS pm10_avg,
MAX(CASE WHEN code = 'pm10' THEN value::numeric END) AS pm10_max,
-- VOC
MIN(CASE WHEN code = 'voc_value' THEN value::numeric END) AS voc_min,
AVG(CASE WHEN code = 'voc_value' THEN value::numeric END) AS voc_avg,
MAX(CASE WHEN code = 'voc_value' THEN value::numeric END) AS voc_max,
-- CH2O
MIN(CASE WHEN code = 'ch2o_value' THEN value::numeric END) AS ch2o_min,
AVG(CASE WHEN code = 'ch2o_value' THEN value::numeric END) AS ch2o_avg,
MAX(CASE WHEN code = 'ch2o_value' THEN value::numeric END) AS ch2o_max,
-- CO2
MIN(CASE WHEN code = 'co2_value' THEN value::numeric END) AS co2_min,
AVG(CASE WHEN code = 'co2_value' THEN value::numeric END) AS co2_avg,
MAX(CASE WHEN code = 'co2_value' THEN value::numeric END) AS co2_max
FROM device_space
GROUP BY space_id, event_hour, event_date
),
filled_pollutants AS (
SELECT
*,
-- AVG
COALESCE(pm25_avg, LAG(pm25_avg) OVER (PARTITION BY space_id ORDER BY event_hour)) AS pm25_avg_f,
COALESCE(pm10_avg, LAG(pm10_avg) OVER (PARTITION BY space_id ORDER BY event_hour)) AS pm10_avg_f,
COALESCE(voc_avg, LAG(voc_avg) OVER (PARTITION BY space_id ORDER BY event_hour)) AS voc_avg_f,
COALESCE(co2_avg, LAG(co2_avg) OVER (PARTITION BY space_id ORDER BY event_hour)) AS co2_avg_f,
COALESCE(ch2o_avg, LAG(ch2o_avg) OVER (PARTITION BY space_id ORDER BY event_hour)) AS ch2o_avg_f,
-- MIN
COALESCE(pm25_min, LAG(pm25_min) OVER (PARTITION BY space_id ORDER BY event_hour)) AS pm25_min_f,
COALESCE(pm10_min, LAG(pm10_min) OVER (PARTITION BY space_id ORDER BY event_hour)) AS pm10_min_f,
COALESCE(voc_min, LAG(voc_min) OVER (PARTITION BY space_id ORDER BY event_hour)) AS voc_min_f,
COALESCE(co2_min, LAG(co2_min) OVER (PARTITION BY space_id ORDER BY event_hour)) AS co2_min_f,
COALESCE(ch2o_min, LAG(ch2o_min) OVER (PARTITION BY space_id ORDER BY event_hour)) AS ch2o_min_f,
-- MAX
COALESCE(pm25_max, LAG(pm25_max) OVER (PARTITION BY space_id ORDER BY event_hour)) AS pm25_max_f,
COALESCE(pm10_max, LAG(pm10_max) OVER (PARTITION BY space_id ORDER BY event_hour)) AS pm10_max_f,
COALESCE(voc_max, LAG(voc_max) OVER (PARTITION BY space_id ORDER BY event_hour)) AS voc_max_f,
COALESCE(co2_max, LAG(co2_max) OVER (PARTITION BY space_id ORDER BY event_hour)) AS co2_max_f,
COALESCE(ch2o_max, LAG(ch2o_max) OVER (PARTITION BY space_id ORDER BY event_hour)) AS ch2o_max_f
FROM average_pollutants
),
hourly_results AS (
SELECT
space_id,
event_date,
event_hour,
pm1_min, pm1_avg, pm1_max,
pm25_min_f, pm25_avg_f, pm25_max_f,
pm10_min_f, pm10_avg_f, pm10_max_f,
voc_min_f, voc_avg_f, voc_max_f,
co2_min_f, co2_avg_f, co2_max_f,
ch2o_min_f, ch2o_avg_f, ch2o_max_f,
GREATEST(
calculate_aqi('pm25', pm25_min_f),
calculate_aqi('pm10', pm10_min_f)
) AS hourly_min_aqi,
GREATEST(
calculate_aqi('pm25', pm25_avg_f),
calculate_aqi('pm10', pm10_avg_f)
) AS hourly_avg_aqi,
GREATEST(
calculate_aqi('pm25', pm25_max_f),
calculate_aqi('pm10', pm10_max_f)
) AS hourly_max_aqi,
classify_aqi(GREATEST(
calculate_aqi('pm25', pm25_avg_f),
calculate_aqi('pm10', pm10_avg_f)
)) AS aqi_category,
classify_aqi(calculate_aqi('pm25',pm25_avg_f)) as pm25_category,
classify_aqi(calculate_aqi('pm10',pm10_avg_f)) as pm10_category,
classify_aqi(calculate_aqi('voc',voc_avg_f)) as voc_category,
classify_aqi(calculate_aqi('co2',co2_avg_f)) as co2_category,
classify_aqi(calculate_aqi('ch2o',ch2o_avg_f)) as ch2o_category
FROM filled_pollutants
),
daily_category_counts AS (
SELECT space_id, event_date, aqi_category AS category, 'aqi' AS pollutant, COUNT(*) AS category_count
FROM hourly_results
GROUP BY space_id, event_date, aqi_category
UNION ALL
SELECT space_id, event_date, pm25_category AS category, 'pm25' AS pollutant, COUNT(*) AS category_count
FROM hourly_results
GROUP BY space_id, event_date, pm25_category
UNION ALL
SELECT space_id, event_date, pm10_category AS category, 'pm10' AS pollutant, COUNT(*) AS category_count
FROM hourly_results
GROUP BY space_id, event_date, pm10_category
UNION ALL
SELECT space_id, event_date, voc_category AS category, 'voc' AS pollutant, COUNT(*) AS category_count
FROM hourly_results
GROUP BY space_id, event_date, voc_category
UNION ALL
SELECT space_id, event_date, co2_category AS category, 'co2' AS pollutant, COUNT(*) AS category_count
FROM hourly_results
GROUP BY space_id, event_date, co2_category
UNION ALL
SELECT space_id, event_date, ch2o_category AS category, 'ch2o' AS pollutant, COUNT(*) AS category_count
FROM hourly_results
GROUP BY space_id, event_date, ch2o_category
),
daily_totals AS (
SELECT
space_id,
event_date,
SUM(category_count) AS total_count
FROM daily_category_counts
where pollutant = 'aqi'
GROUP BY space_id, event_date
),
-- Pivot Categories into Columns
daily_percentages AS (
select
dt.space_id,
dt.event_date,
-- AQI CATEGORIES
ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Good' and dcc.pollutant = 'aqi' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS good_aqi_percentage,
ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Moderate' and dcc.pollutant = 'aqi' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS moderate_aqi_percentage,
ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Unhealthy for Sensitive Groups' and dcc.pollutant = 'aqi' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS unhealthy_sensitive_aqi_percentage,
ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Unhealthy' and dcc.pollutant = 'aqi' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS unhealthy_aqi_percentage,
ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Very Unhealthy' and dcc.pollutant = 'aqi' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS very_unhealthy_aqi_percentage,
ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Hazardous' and dcc.pollutant = 'aqi' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS hazardous_aqi_percentage,
-- PM25 CATEGORIES
ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Good' and dcc.pollutant = 'pm25' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS good_pm25_percentage,
ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Moderate' and dcc.pollutant = 'pm25' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS moderate_pm25_percentage,
ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Unhealthy for Sensitive Groups' and dcc.pollutant = 'pm25' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS unhealthy_sensitive_pm25_percentage,
ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Unhealthy' and dcc.pollutant = 'pm25' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS unhealthy_pm25_percentage,
ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Very Unhealthy' and dcc.pollutant = 'pm25' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS very_unhealthy_pm25_percentage,
ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Hazardous' and dcc.pollutant = 'pm25' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS hazardous_pm25_percentage,
-- PM10 CATEGORIES
ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Good' and dcc.pollutant = 'pm10' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS good_pm10_percentage,
ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Moderate' and dcc.pollutant = 'pm10' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS moderate_pm10_percentage,
ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Unhealthy for Sensitive Groups' and dcc.pollutant = 'pm10' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS unhealthy_sensitive_pm10_percentage,
ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Unhealthy' and dcc.pollutant = 'pm10' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS unhealthy_pm10_percentage,
ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Very Unhealthy' and dcc.pollutant = 'pm10' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS very_unhealthy_pm10_percentage,
ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Hazardous' and dcc.pollutant = 'pm10' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS hazardous_pm10_percentage,
-- VOC CATEGORIES
ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Good' and dcc.pollutant = 'voc' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS good_voc_percentage,
ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Moderate' and dcc.pollutant = 'voc' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS moderate_voc_percentage,
ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Unhealthy for Sensitive Groups' and dcc.pollutant = 'voc' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS unhealthy_sensitive_voc_percentage,
ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Unhealthy' and dcc.pollutant = 'voc' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS unhealthy_voc_percentage,
ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Very Unhealthy' and dcc.pollutant = 'voc' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS very_unhealthy_voc_percentage,
ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Hazardous' and dcc.pollutant = 'voc' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS hazardous_voc_percentage,
-- CO2 CATEGORIES
ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Good' and dcc.pollutant = 'co2' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS good_co2_percentage,
ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Moderate' and dcc.pollutant = 'co2' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS moderate_co2_percentage,
ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Unhealthy for Sensitive Groups' and dcc.pollutant = 'co2' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS unhealthy_sensitive_co2_percentage,
ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Unhealthy' and dcc.pollutant = 'co2' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS unhealthy_co2_percentage,
ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Very Unhealthy' and dcc.pollutant = 'co2' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS very_unhealthy_co2_percentage,
ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Hazardous' and dcc.pollutant = 'co2' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS hazardous_co2_percentage,
-- CH2O CATEGORIES
ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Good' and dcc.pollutant = 'ch2o' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS good_ch2o_percentage,
ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Moderate' and dcc.pollutant = 'ch2o' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS moderate_ch2o_percentage,
ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Unhealthy for Sensitive Groups' and dcc.pollutant = 'ch2o' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS unhealthy_sensitive_ch2o_percentage,
ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Unhealthy' and dcc.pollutant = 'ch2o' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS unhealthy_ch2o_percentage,
ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Very Unhealthy' and dcc.pollutant = 'ch2o' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS very_unhealthy_ch2o_percentage,
ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Hazardous' and dcc.pollutant = 'ch2o' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS hazardous_ch2o_percentage
FROM daily_totals dt
LEFT JOIN daily_category_counts dcc
ON dt.space_id = dcc.space_id AND dt.event_date = dcc.event_date
GROUP BY dt.space_id, dt.event_date, dt.total_count
),
daily_averages AS (
SELECT
space_id,
event_date,
-- AQI
ROUND(AVG(hourly_min_aqi)::numeric, 2) AS daily_min_aqi,
ROUND(AVG(hourly_avg_aqi)::numeric, 2) AS daily_avg_aqi,
ROUND(AVG(hourly_max_aqi)::numeric, 2) AS daily_max_aqi,
-- PM25
ROUND(AVG(pm25_min_f)::numeric, 2) AS daily_min_pm25,
ROUND(AVG(pm25_avg_f)::numeric, 2) AS daily_avg_pm25,
ROUND(AVG(pm25_max_f)::numeric, 2) AS daily_max_pm25,
-- PM10
ROUND(AVG(pm10_min_f)::numeric, 2) AS daily_min_pm10,
ROUND(AVG(pm10_avg_f)::numeric, 2) AS daily_avg_pm10,
ROUND(AVG(pm10_max_f)::numeric, 2) AS daily_max_pm10,
-- VOC
ROUND(AVG(voc_min_f)::numeric, 2) AS daily_min_voc,
ROUND(AVG(voc_avg_f)::numeric, 2) AS daily_avg_voc,
ROUND(AVG(voc_max_f)::numeric, 2) AS daily_max_voc,
-- CO2
ROUND(AVG(co2_min_f)::numeric, 2) AS daily_min_co2,
ROUND(AVG(co2_avg_f)::numeric, 2) AS daily_avg_co2,
ROUND(AVG(co2_max_f)::numeric, 2) AS daily_max_co2,
-- CH2O
ROUND(AVG(ch2o_min_f)::numeric, 2) AS daily_min_ch2o,
ROUND(AVG(ch2o_avg_f)::numeric, 2) AS daily_avg_ch2o,
ROUND(AVG(ch2o_max_f)::numeric, 2) AS daily_max_ch2o
FROM hourly_results
GROUP BY space_id, event_date
)
SELECT
p.space_id,
p.event_date,
p.good_aqi_percentage, p.moderate_aqi_percentage, p.unhealthy_sensitive_aqi_percentage, p.unhealthy_aqi_percentage, p.very_unhealthy_aqi_percentage, p.hazardous_aqi_percentage,
a.daily_avg_aqi,a.daily_max_aqi, a.daily_min_aqi,
p.good_pm25_percentage, p.moderate_pm25_percentage, p.unhealthy_sensitive_pm25_percentage, p.unhealthy_pm25_percentage, p.very_unhealthy_pm25_percentage, p.hazardous_pm25_percentage,
a.daily_avg_pm25,a.daily_max_pm25, a.daily_min_pm25,
p.good_pm10_percentage, p.moderate_pm10_percentage, p.unhealthy_sensitive_pm10_percentage, p.unhealthy_pm10_percentage, p.very_unhealthy_pm10_percentage, p.hazardous_pm10_percentage,
a.daily_avg_pm10, a.daily_max_pm10, a.daily_min_pm10,
p.good_voc_percentage, p.moderate_voc_percentage, p.unhealthy_sensitive_voc_percentage, p.unhealthy_voc_percentage, p.very_unhealthy_voc_percentage, p.hazardous_voc_percentage,
a.daily_avg_voc, a.daily_max_voc, a.daily_min_voc,
p.good_co2_percentage, p.moderate_co2_percentage, p.unhealthy_sensitive_co2_percentage, p.unhealthy_co2_percentage, p.very_unhealthy_co2_percentage, p.hazardous_co2_percentage,
a.daily_avg_co2,a.daily_max_co2, a.daily_min_co2,
p.good_ch2o_percentage, p.moderate_ch2o_percentage, p.unhealthy_sensitive_ch2o_percentage, p.unhealthy_ch2o_percentage, p.very_unhealthy_ch2o_percentage, p.hazardous_ch2o_percentage,
a.daily_avg_ch2o,a.daily_max_ch2o, a.daily_min_ch2o
FROM daily_percentages p
LEFT JOIN daily_averages a
ON p.space_id = a.space_id AND p.event_date = a.event_date
ORDER BY p.space_id, p.event_date;
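
As a reference for the filled_pollutants step above, a minimal TypeScript sketch of the one-step backfill that COALESCE(value, LAG(value) OVER (...)) performs, assuming a simple array of hourly readings ordered by hour (hypothetical data):

// Hourly averages for one space, ordered by hour; null means no reading that hour.
const hourlyPm25: (number | null)[] = [12.4, null, 15.1, null, null, 18.0];

// If the current hour is null, take the previous hour's raw value. Note this
// fills only single-hour gaps; two consecutive nulls leave the second one null,
// exactly as LAG over the raw column does in the SQL.
const filled = hourlyPm25.map((value, i) =>
  value !== null ? value : i > 0 ? hourlyPm25[i - 1] : null,
);
// filled => [12.4, 12.4, 15.1, 15.1, null, 18.0]
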

View File

@ -1,90 +1,91 @@
WITH presence_logs AS (
SELECT
d.space_device_uuid AS space_id,
l.device_id,
l.event_time,
l.value,
LAG(l.event_time) OVER (PARTITION BY l.device_id ORDER BY l.event_time) AS prev_time,
LAG(l.value) OVER (PARTITION BY l.device_id ORDER BY l.event_time) AS prev_value
FROM device d
JOIN "device-status-log" l ON d.uuid = l.device_id
JOIN product p ON p.uuid = d.product_device_uuid
WHERE l.code = 'presence_state'
AND p.cat_name = 'hps'
-- Step 1: Get device presence events with previous timestamps
WITH start_date AS (
SELECT
d.uuid AS device_id,
d.space_device_uuid AS space_id,
l.value,
l.event_time::timestamp AS event_time,
LAG(l.event_time::timestamp) OVER (PARTITION BY d.uuid ORDER BY l.event_time) AS prev_timestamp
FROM device d
LEFT JOIN "device-status-log" l
ON d.uuid = l.device_id
LEFT JOIN product p
ON p.uuid = d.product_device_uuid
WHERE p.cat_name = 'hps'
AND l.code = 'presence_state'
),
-- Intervals when device was in 'presence' (between prev_time and event_time when value='none')
presence_intervals AS (
SELECT
space_id,
prev_time AS start_time,
event_time AS end_time
FROM presence_logs
WHERE value = 'none'
AND prev_value = 'presence'
AND prev_time IS NOT NULL
-- Step 2: Identify periods when device reports "none"
device_none_periods AS (
SELECT
space_id,
device_id,
event_time AS empty_from,
LEAD(event_time) OVER (PARTITION BY device_id ORDER BY event_time) AS empty_until
FROM start_date
WHERE value = 'none'
),
-- Split intervals across days
split_intervals AS (
SELECT
space_id,
generate_series(
date_trunc('day', start_time),
date_trunc('day', end_time),
interval '1 day'
)::date AS event_date,
GREATEST(start_time, date_trunc('day', start_time)) AS interval_start,
LEAST(end_time, date_trunc('day', end_time) + interval '1 day') AS interval_end
FROM presence_intervals
-- Step 3: Clip the "none" periods to the edges of each day
clipped_device_none_periods AS (
SELECT
space_id,
GREATEST(empty_from, DATE_TRUNC('day', empty_from)) AS clipped_from,
LEAST(empty_until, DATE_TRUNC('day', empty_until) + INTERVAL '1 day') AS clipped_until
FROM device_none_periods
WHERE empty_until IS NOT NULL
),
-- Mark and group overlapping intervals per space per day
ordered_intervals AS (
SELECT
space_id,
event_date,
interval_start,
interval_end,
LAG(interval_end) OVER (PARTITION BY space_id, event_date ORDER BY interval_start) AS prev_end
FROM split_intervals
-- Step 4: Break multi-day periods into daily intervals
generated_daily_intervals AS (
SELECT
space_id,
gs::date AS day,
GREATEST(clipped_from, gs) AS interval_start,
LEAST(clipped_until, gs + INTERVAL '1 day') AS interval_end
FROM clipped_device_none_periods,
LATERAL generate_series(DATE_TRUNC('day', clipped_from), DATE_TRUNC('day', clipped_until), INTERVAL '1 day') AS gs
),
grouped_intervals AS (
SELECT *,
SUM(CASE
WHEN prev_end IS NULL OR interval_start > prev_end THEN 1
ELSE 0
END) OVER (PARTITION BY space_id, event_date ORDER BY interval_start) AS grp
FROM ordered_intervals
),
-- Merge overlapping intervals per group
-- Step 5: Merge overlapping or adjacent intervals per day
merged_intervals AS (
SELECT
space_id,
event_date,
MIN(interval_start) AS merged_start,
MAX(interval_end) AS merged_end
FROM grouped_intervals
GROUP BY space_id, event_date, grp
SELECT
space_id,
day,
interval_start,
interval_end
FROM (
SELECT
space_id,
day,
interval_start,
interval_end,
LAG(interval_end) OVER (PARTITION BY space_id, day ORDER BY interval_start) AS prev_end
FROM generated_daily_intervals
) sub
WHERE prev_end IS NULL OR interval_start > prev_end
),
-- Sum durations of merged intervals
summed_intervals AS (
SELECT
space_id,
event_date,
SUM(EXTRACT(EPOCH FROM (merged_end - merged_start))) AS raw_occupied_seconds
FROM merged_intervals
GROUP BY space_id, event_date
-- Step 6: Sum up total missing seconds (device reported "none") per day
missing_seconds_per_day AS (
SELECT
space_id,
day AS missing_date,
SUM(EXTRACT(EPOCH FROM (interval_end - interval_start))) AS total_missing_seconds
FROM merged_intervals
GROUP BY space_id, day
),
-- Step 7: Calculate total occupied time per day (86400 - missing)
occupied_seconds_per_day AS (
SELECT
space_id,
missing_date as date,
86400 - total_missing_seconds AS total_occupied_seconds
FROM missing_seconds_per_day
)
-- Final output with capped seconds and percentage
SELECT
space_id,
event_date,
LEAST(raw_occupied_seconds, 86400) AS occupied_seconds,
ROUND(LEAST(raw_occupied_seconds, 86400) / 86400.0 * 100, 2) AS occupancy_percentage
FROM summed_intervals
ORDER BY space_id, event_date;
-- Final Output
SELECT *
FROM occupied_seconds_per_day
ORDER BY 1,2;
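
A minimal TypeScript sketch of the final occupancy arithmetic that both variants above converge on, assuming hypothetical merged "none" intervals for one space on one day:

// Merged, non-overlapping "presence_state = none" intervals for one day (hypothetical).
const noneIntervals = [
  { start: new Date('2025-05-01T00:00:00Z'), end: new Date('2025-05-01T06:30:00Z') },
  { start: new Date('2025-05-01T22:00:00Z'), end: new Date('2025-05-02T00:00:00Z') },
];

const SECONDS_PER_DAY = 86_400;

// Step 6 equivalent: total seconds with no presence reported.
const missingSeconds = noneIntervals.reduce(
  (sum, { start, end }) => sum + (end.getTime() - start.getTime()) / 1000,
  0,
); // 6.5 h + 2 h = 30_600 s

// Step 7 equivalent: occupied seconds and percentage, capped at a full day.
const occupiedSeconds = Math.min(SECONDS_PER_DAY - missingSeconds, SECONDS_PER_DAY);
const occupancyPercentage =
  Math.round((occupiedSeconds / SECONDS_PER_DAY) * 100 * 100) / 100;
// occupiedSeconds = 55_800, occupancyPercentage = 64.58
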

View File

@ -1,18 +0,0 @@
export function calculateAQI(pm2_5: number): number {
const breakpoints = [
{ pmLow: 0.0, pmHigh: 12.0, aqiLow: 0, aqiHigh: 50 },
{ pmLow: 12.1, pmHigh: 35.4, aqiLow: 51, aqiHigh: 100 },
{ pmLow: 35.5, pmHigh: 55.4, aqiLow: 101, aqiHigh: 150 },
{ pmLow: 55.5, pmHigh: 150.4, aqiLow: 151, aqiHigh: 200 },
{ pmLow: 150.5, pmHigh: 250.4, aqiLow: 201, aqiHigh: 300 },
{ pmLow: 250.5, pmHigh: 500.4, aqiLow: 301, aqiHigh: 500 },
];
const bp = breakpoints.find((b) => pm2_5 >= b.pmLow && pm2_5 <= b.pmHigh);
if (!bp) return pm2_5 > 500.4 ? 500 : 0; // Handle out-of-range values
return Math.round(
((bp.aqiHigh - bp.aqiLow) / (bp.pmHigh - bp.pmLow)) * (pm2_5 - bp.pmLow) +
bp.aqiLow,
);
}
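
For illustration, a few sample inputs for the calculateAQI helper above (values chosen at the EPA PM2.5 breakpoint edges):

calculateAQI(12.0); // 50  — top of the "Good" PM2.5 band
calculateAQI(35.4); // 100 — top of the "Moderate" band
calculateAQI(600);  // 500 — out-of-range values above 500.4 are capped
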

View File

@ -1,11 +0,0 @@
import { DeviceEntity } from '../modules/device/entities';
export function addSpaceUuidToDevices(
devices: DeviceEntity[],
spaceUuid: string,
): DeviceEntity[] {
return devices.map((device) => {
(device as any).spaceUuid = spaceUuid;
return device;
});
}

View File

@ -1,15 +1,15 @@
import { HttpException, HttpStatus, Injectable } from '@nestjs/common';
import { ConfigService } from '@nestjs/config';
import * as nodemailer from 'nodemailer';
import axios from 'axios';
import nodemailer from 'nodemailer';
import Mail from 'nodemailer/lib/mailer';
import {
SEND_EMAIL_API_URL_DEV,
SEND_EMAIL_API_URL_PROD,
} from '../constants/mail-trap';
@Injectable()
export class EmailService {
private smtpConfig: any;
private API_TOKEN: string;
private SEND_EMAIL_API_URL: string;
private BATCH_EMAIL_API_URL: string;
constructor(private readonly configService: ConfigService) {
this.smtpConfig = {
@ -22,15 +22,6 @@ export class EmailService {
pass: this.configService.get<string>('email-config.SMTP_PASSWORD'),
},
};
this.API_TOKEN = this.configService.get<string>(
'email-config.MAILTRAP_API_TOKEN',
);
this.SEND_EMAIL_API_URL = this.configService.get<string>(
'email-config.SEND_EMAIL_API_URL',
);
this.BATCH_EMAIL_API_URL = this.configService.get<string>(
'email-config.BATCH_EMAIL_API_URL',
);
}
async sendEmail(
@ -40,7 +31,7 @@ export class EmailService {
): Promise<void> {
const transporter = nodemailer.createTransport(this.smtpConfig);
const mailOptions: Mail.Options = {
const mailOptions = {
from: this.smtpConfig.sender,
to: email,
subject,
@ -53,6 +44,13 @@ export class EmailService {
email: string,
emailInvitationData: any,
): Promise<void> {
const isProduction = process.env.NODE_ENV === 'production';
const API_TOKEN = this.configService.get<string>(
'email-config.MAILTRAP_API_TOKEN',
);
const API_URL = isProduction
? SEND_EMAIL_API_URL_PROD
: SEND_EMAIL_API_URL_DEV;
const TEMPLATE_UUID = this.configService.get<string>(
'email-config.MAILTRAP_INVITATION_TEMPLATE_UUID',
);
@ -70,23 +68,35 @@ export class EmailService {
template_variables: emailInvitationData,
};
return this.sendEmailWithTemplateV2({
...emailData,
isBatch: false,
});
try {
await axios.post(API_URL, emailData, {
headers: {
Authorization: `Bearer ${API_TOKEN}`,
'Content-Type': 'application/json',
},
});
} catch (error) {
throw new HttpException(
error.response?.data?.message ||
'Error sending email using Mailtrap template',
error.response?.status || HttpStatus.INTERNAL_SERVER_ERROR,
);
}
}
async sendEmailWithTemplate(
email: string,
name: string,
isEnable: boolean,
isDelete: boolean,
): Promise<void> {
const isProduction = process.env.NODE_ENV === 'production';
const API_TOKEN = this.configService.get<string>(
'email-config.MAILTRAP_API_TOKEN',
);
const API_URL = isProduction
? SEND_EMAIL_API_URL_PROD
: SEND_EMAIL_API_URL_DEV;
async sendEmailWithTemplate({
email,
name,
isEnable,
isDelete,
}: {
email: string;
name: string;
isEnable: boolean;
isDelete: boolean;
}): Promise<void> {
// Determine the template UUID based on the arguments
const templateUuid = isDelete
? this.configService.get<string>(
@ -113,16 +123,32 @@ export class EmailService {
},
};
return this.sendEmailWithTemplateV2({
...emailData,
isBatch: false,
});
try {
await axios.post(API_URL, emailData, {
headers: {
Authorization: `Bearer ${API_TOKEN}`,
'Content-Type': 'application/json',
},
});
} catch (error) {
throw new HttpException(
error.response?.data?.message ||
'Error sending email using Mailtrap template',
error.response?.status || HttpStatus.INTERNAL_SERVER_ERROR,
);
}
}
async sendEditUserEmailWithTemplate(
email: string,
emailEditData: any,
): Promise<void> {
const isProduction = process.env.NODE_ENV === 'production';
const API_TOKEN = this.configService.get<string>(
'email-config.MAILTRAP_API_TOKEN',
);
const API_URL = isProduction
? SEND_EMAIL_API_URL_PROD
: SEND_EMAIL_API_URL_DEV;
const TEMPLATE_UUID = this.configService.get<string>(
'email-config.MAILTRAP_EDIT_USER_TEMPLATE_UUID',
);
@ -140,15 +166,32 @@ export class EmailService {
template_variables: emailEditData,
};
return this.sendEmailWithTemplateV2({
...emailData,
isBatch: false,
});
try {
await axios.post(API_URL, emailData, {
headers: {
Authorization: `Bearer ${API_TOKEN}`,
'Content-Type': 'application/json',
},
});
} catch (error) {
throw new HttpException(
error.response?.data?.message ||
'Error sending email using Mailtrap template',
error.response?.status || HttpStatus.INTERNAL_SERVER_ERROR,
);
}
}
async sendOtpEmailWithTemplate(
email: string,
emailEditData: any,
): Promise<void> {
const isProduction = process.env.NODE_ENV === 'production';
const API_TOKEN = this.configService.get<string>(
'email-config.MAILTRAP_API_TOKEN',
);
const API_URL = isProduction
? SEND_EMAIL_API_URL_PROD
: SEND_EMAIL_API_URL_DEV;
const TEMPLATE_UUID = this.configService.get<string>(
'email-config.MAILTRAP_SEND_OTP_TEMPLATE_UUID',
);
@ -166,84 +209,20 @@ export class EmailService {
template_variables: emailEditData,
};
return this.sendEmailWithTemplateV2({
...emailData,
isBatch: false,
});
}
async sendUpdateBookingTimingEmailWithTemplate(
emails: {
email: string;
name: string;
bookings: {
date: string;
start_time: string;
end_time: string;
}[];
}[],
emailVariables: {
space_name: string;
days: string;
start_time: string;
end_time: string;
},
): Promise<void> {
const TEMPLATE_UUID = this.configService.get<string>(
'email-config.MAILTRAP_SEND_BOOKING_TIMING_UPDATE_TEMPLATE_UUID',
);
const emailData = {
base: {
from: {
email: this.smtpConfig.sender,
try {
await axios.post(API_URL, emailData, {
headers: {
Authorization: `Bearer ${API_TOKEN}`,
'Content-Type': 'application/json',
},
template_uuid: TEMPLATE_UUID,
},
requests: emails.map(({ email, name, bookings }) => ({
to: [{ email }],
template_variables: { ...emailVariables, name, bookings },
})),
};
return this.sendEmailWithTemplateV2({
...emailData,
isBatch: true,
});
}
async sendUpdateBookingAvailabilityEmailWithTemplate(
emails: { email: string; name: string }[],
emailVariables: {
space_name: string;
availability: string;
isAvailable: boolean;
},
): Promise<void> {
const TEMPLATE_UUID = this.configService.get<string>(
'email-config.MAILTRAP_SEND_BOOKING_AVAILABILITY_UPDATE_TEMPLATE_UUID',
);
const emailData = {
base: {
from: {
email: this.smtpConfig.sender,
},
template_uuid: TEMPLATE_UUID,
},
requests: emails.map(({ email, name }) => ({
to: [{ email }],
template_variables: {
...emailVariables,
name,
},
})),
};
return this.sendEmailWithTemplateV2({
...emailData,
isBatch: true,
});
});
} catch (error) {
throw new HttpException(
error.response?.data?.message ||
'Error sending email using Mailtrap template',
error.response?.status || HttpStatus.INTERNAL_SERVER_ERROR,
);
}
}
generateUserChangesEmailBody(
addedSpaceNames: string[],
@ -280,47 +259,4 @@ export class EmailService {
nameChanged,
};
}
private async sendEmailWithTemplateV2({
isBatch,
...emailData
}: BatchEmailData | SingleEmailData): Promise<void> {
try {
await axios.post(
isBatch ? this.BATCH_EMAIL_API_URL : this.SEND_EMAIL_API_URL,
{
...emailData,
},
{
headers: {
Authorization: `Bearer ${this.API_TOKEN}`,
'Content-Type': 'application/json',
},
},
);
} catch (error) {
throw new HttpException(
error.response?.data?.message ||
'Error sending email using Mailtrap template',
error.response?.status || HttpStatus.INTERNAL_SERVER_ERROR,
);
}
}
}
interface BatchEmailData {
base: { from: { email: string }; template_uuid: string };
requests: Array<{
to: { email: string }[];
template_variables: Record<string, any>;
}>;
isBatch: true;
}
interface SingleEmailData {
from: { email: string };
to: { email: string }[];
template_uuid: string;
template_variables?: Record<string, any>;
isBatch: false;
}
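
For reference, a minimal standalone TypeScript sketch of the template-send call that both variants in the diff above boil down to; the endpoint URL, token, sender address, and template UUID are placeholders, not real values (in the service they come from ConfigService and the mail-trap constants):

import axios from 'axios';

// Placeholders for illustration only.
const API_URL = 'https://example.mailtrap.endpoint/api/send';
const API_TOKEN = '<mailtrap-api-token>';

async function sendTemplateEmail(
  to: string,
  templateUuid: string,
  templateVariables: Record<string, any>,
): Promise<void> {
  const emailData = {
    from: { email: 'no-reply@example.com' },
    to: [{ email: to }],
    template_uuid: templateUuid,
    template_variables: templateVariables,
  };
  // Same POST shape as in sendEmailWithTemplateV2 / the per-method axios calls.
  await axios.post(API_URL, emailData, {
    headers: {
      Authorization: `Bearer ${API_TOKEN}`,
      'Content-Type': 'application/json',
    },
  });
}
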

View File

@ -1,7 +0,0 @@
import { format, parse } from 'date-fns';
export function to12HourFormat(timeString: string): string {
timeString = timeString.padEnd(8, ':00');
const parsedTime = parse(timeString, 'HH:mm:ss', new Date());
return format(parsedTime, 'hh:mm a');
}
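
For illustration, a couple of sample calls to the helper above (times are arbitrary examples):

to12HourFormat('14:30');    // '02:30 PM' — padded to '14:30:00' before parsing
to12HourFormat('09:05:00'); // '09:05 AM'
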

3994
package-lock.json generated

File diff suppressed because it is too large

View File

@ -10,7 +10,6 @@
"format": "prettier --write \"apps/**/*.ts\" \"libs/**/*.ts\"",
"start": "npm run test && node dist/main",
"start:dev": "npm run test && npx nest start --watch",
"dev": "npx nest start --watch",
"start:debug": "npm run test && npx nest start --debug --watch",
"start:prod": "npm run test && node dist/main",
"lint": "eslint \"{src,apps,libs,test}/**/*.ts\" --fix",
@ -30,7 +29,6 @@
"@nestjs/jwt": "^10.2.0",
"@nestjs/passport": "^10.0.3",
"@nestjs/platform-express": "^10.0.0",
"@nestjs/schedule": "^6.0.0",
"@nestjs/swagger": "^7.3.0",
"@nestjs/terminus": "^11.0.0",
"@nestjs/throttler": "^6.4.0",
@ -44,7 +42,6 @@
"class-validator": "^0.14.1",
"crypto-js": "^4.2.0",
"csv-parser": "^3.2.0",
"date-fns": "^4.1.0",
"express-rate-limit": "^7.1.5",
"firebase": "^10.12.5",
"google-auth-library": "^9.14.1",
@ -52,12 +49,11 @@
"ioredis": "^5.3.2",
"morgan": "^1.10.0",
"nest-winston": "^1.10.2",
"node-cache": "^5.1.2",
"nodemailer": "^7.0.5",
"nodemailer": "^6.9.10",
"onesignal-node": "^3.4.0",
"passport-jwt": "^4.0.1",
"pg": "^8.11.3",
"reflect-metadata": "^0.2.2",
"reflect-metadata": "^0.2.0",
"rxjs": "^7.8.1",
"typeorm": "^0.3.20",
"winston": "^3.17.0",
@ -73,7 +69,6 @@
"@types/jest": "^29.5.2",
"@types/multer": "^1.4.12",
"@types/node": "^20.3.1",
"@types/nodemailer": "^6.4.17",
"@types/supertest": "^6.0.0",
"@typescript-eslint/eslint-plugin": "^6.0.0",
"@typescript-eslint/parser": "^6.0.0",
@ -91,9 +86,5 @@
"ts-node": "^10.9.1",
"tsconfig-paths": "^4.2.0",
"typescript": "^5.1.3"
},
"engines": {
"node": "20.x",
"npm": "10.x"
}
}

View File

@ -1,67 +1,51 @@
import { SeederModule } from '@app/common/seed/seeder.module';
import { Module } from '@nestjs/common';
import { ConfigModule } from '@nestjs/config';
import { APP_GUARD, APP_INTERCEPTOR } from '@nestjs/core';
import { WinstonModule } from 'nest-winston';
import { AuthenticationModule } from './auth/auth.module';
import { AutomationModule } from './automation/automation.module';
import { ClientModule } from './client/client.module';
import { DeviceCommissionModule } from './commission-device/commission-device.module';
import { CommunityModule } from './community/community.module';
import config from './config';
import { DeviceMessagesSubscriptionModule } from './device-messages/device-messages.module';
import { DeviceModule } from './device/device.module';
import { DoorLockModule } from './door-lock/door.lock.module';
import { AuthenticationModule } from './auth/auth.module';
import { UserModule } from './users/user.module';
import { GroupModule } from './group/group.module';
import { HealthModule } from './health/health.module';
import { DeviceModule } from './device/device.module';
import { UserDevicePermissionModule } from './user-device-permission/user-device-permission.module';
import { CommunityModule } from './community/community.module';
import { SeederModule } from '@app/common/seed/seeder.module';
import { UserNotificationModule } from './user-notification/user-notification.module';
import { DeviceMessagesSubscriptionModule } from './device-messages/device-messages.module';
import { SceneModule } from './scene/scene.module';
import { DoorLockModule } from './door-lock/door.lock.module';
import { APP_GUARD, APP_INTERCEPTOR } from '@nestjs/core';
import { LoggingInterceptor } from './interceptors/logging.interceptor';
import { InviteUserModule } from './invite-user/invite-user.module';
import { PermissionModule } from './permission/permission.module';
import { PowerClampModule } from './power-clamp/power-clamp.module';
import { PrivacyPolicyModule } from './privacy-policy/privacy-policy.module';
import { AutomationModule } from './automation/automation.module';
import { RegionModule } from './region/region.module';
import { TimeZoneModule } from './timezone/timezone.module';
import { VisitorPasswordModule } from './vistor-password/visitor-password.module';
import { ScheduleModule } from './schedule/schedule.module';
import { SpaceModule } from './space/space.module';
import { ProductModule } from './product';
import { ProjectModule } from './project';
import { RegionModule } from './region/region.module';
import { RoleModule } from './role/role.module';
import { SceneModule } from './scene/scene.module';
import { ScheduleModule } from './schedule/schedule.module';
import { SpaceModelModule } from './space-model';
import { SpaceModule } from './space/space.module';
import { TagModule } from './tags/tags.module';
import { InviteUserModule } from './invite-user/invite-user.module';
import { PermissionModule } from './permission/permission.module';
import { RoleModule } from './role/role.module';
import { TermsConditionsModule } from './terms-conditions/terms-conditions.module';
import { TimeZoneModule } from './timezone/timezone.module';
import { UserDevicePermissionModule } from './user-device-permission/user-device-permission.module';
import { UserNotificationModule } from './user-notification/user-notification.module';
import { UserModule } from './users/user.module';
import { VisitorPasswordModule } from './vistor-password/visitor-password.module';
import { PrivacyPolicyModule } from './privacy-policy/privacy-policy.module';
import { TagModule } from './tags/tags.module';
import { ClientModule } from './client/client.module';
import { DeviceCommissionModule } from './commission-device/commission-device.module';
import { PowerClampModule } from './power-clamp/power-clamp.module';
import { WinstonModule } from 'nest-winston';
import { ThrottlerGuard, ThrottlerModule } from '@nestjs/throttler';
import { HealthModule } from './health/health.module';
import { ThrottlerGuard } from '@nestjs/throttler';
import { ThrottlerModule } from '@nestjs/throttler/dist/throttler.module';
import { isArray } from 'class-validator';
import { winstonLoggerOptions } from '../libs/common/src/logger/services/winston.logger';
import { AqiModule } from './aqi/aqi.module';
import { OccupancyModule } from './occupancy/occupancy.module';
import { WeatherModule } from './weather/weather.module';
import { ScheduleModule as NestScheduleModule } from '@nestjs/schedule';
import { SchedulerModule } from './scheduler/scheduler.module';
import { BookingModule } from './booking';
@Module({
imports: [
ConfigModule.forRoot({
load: config,
}),
ThrottlerModule.forRoot({
throttlers: [{ ttl: 60000, limit: 100 }],
generateKey: (context) => {
const req = context.switchToHttp().getRequest();
console.log('Real IP:', req.headers['x-forwarded-for']);
return req.headers['x-forwarded-for']
? isArray(req.headers['x-forwarded-for'])
? req.headers['x-forwarded-for'][0].split(':')[0]
: req.headers['x-forwarded-for'].split(':')[0]
: req.ip;
},
}),
/* ThrottlerModule.forRoot({
throttlers: [{ ttl: 100000, limit: 30 }],
}), */
WinstonModule.forRoot(winstonLoggerOptions),
ClientModule,
AuthenticationModule,
@ -95,21 +79,16 @@ import { BookingModule } from './booking';
PowerClampModule,
HealthModule,
OccupancyModule,
WeatherModule,
AqiModule,
SchedulerModule,
NestScheduleModule.forRoot(),
BookingModule,
],
providers: [
{
provide: APP_INTERCEPTOR,
useClass: LoggingInterceptor,
},
{
/* {
provide: APP_GUARD,
useClass: ThrottlerGuard,
},
}, */
],
})
export class AppModule {}
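
As a reference for the generateKey logic in the ThrottlerModule configuration above, a minimal TypeScript sketch of the same x-forwarded-for handling as a standalone helper; the header value is hypothetical, and splitting on ':' assumes IPv4-style "ip:port" values, as the original code does:

// Mirrors the config: array header -> first entry, string header -> as-is,
// otherwise fall back to req.ip; anything after ':' (a port) is dropped.
function clientKeyFromRequest(req: {
  headers: Record<string, string | string[] | undefined>;
  ip: string;
}): string {
  const forwarded = req.headers['x-forwarded-for'];
  if (!forwarded) return req.ip;
  const first = Array.isArray(forwarded) ? forwarded[0] : forwarded;
  return first.split(':')[0];
}

// Example (hypothetical): { 'x-forwarded-for': '203.0.113.7:51324' } -> '203.0.113.7'
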

View File

@ -1,11 +0,0 @@
import { Module } from '@nestjs/common';
import { ConfigModule } from '@nestjs/config';
import { SqlLoaderService } from '@app/common/helper/services/sql-loader.service';
import { AqiService } from './services';
import { AqiController } from './controllers';
@Module({
imports: [ConfigModule],
controllers: [AqiController],
providers: [AqiService, SqlLoaderService],
})
export class AqiModule {}

View File

@ -1,64 +0,0 @@
import { Controller, Get, Param, Query, UseGuards } from '@nestjs/common';
import {
ApiTags,
ApiBearerAuth,
ApiOperation,
ApiParam,
} from '@nestjs/swagger';
import { EnableDisableStatusEnum } from '@app/common/constants/days.enum';
import { ControllerRoute } from '@app/common/constants/controller-route';
import { JwtAuthGuard } from '@app/common/guards/jwt.auth.guard';
import { AqiService } from '../services/aqi.service';
import {
GetAqiDailyBySpaceDto,
GetAqiPollutantBySpaceDto,
} from '../dto/get-aqi.dto';
import { BaseResponseDto } from '@app/common/dto/base.response.dto';
import { SpaceParamsDto } from '../dto/aqi-params.dto';
@ApiTags('AQI Module')
@Controller({
version: EnableDisableStatusEnum.ENABLED,
path: ControllerRoute.AQI.ROUTE,
})
export class AqiController {
constructor(private readonly aqiService: AqiService) {}
@ApiBearerAuth()
@UseGuards(JwtAuthGuard)
@Get('range/space/:spaceUuid')
@ApiOperation({
summary: ControllerRoute.AQI.ACTIONS.GET_AQI_RANGE_DATA_SUMMARY,
description: ControllerRoute.AQI.ACTIONS.GET_AQI_RANGE_DATA_DESCRIPTION,
})
@ApiParam({
name: 'spaceUuid',
description: 'UUID of the Space',
required: true,
})
async getAQIRangeDataBySpace(
@Param() params: SpaceParamsDto,
@Query() query: GetAqiDailyBySpaceDto,
): Promise<BaseResponseDto> {
return await this.aqiService.getAQIRangeDataBySpace(params, query);
}
@ApiBearerAuth()
@UseGuards(JwtAuthGuard)
@Get('distribution/space/:spaceUuid')
@ApiOperation({
summary: ControllerRoute.AQI.ACTIONS.GET_AQI_DISTRIBUTION_DATA_SUMMARY,
description:
ControllerRoute.AQI.ACTIONS.GET_AQI_DISTRIBUTION_DATA_DESCRIPTION,
})
@ApiParam({
name: 'spaceUuid',
description: 'UUID of the Space',
required: true,
})
async getAQIDistributionDataBySpace(
@Param() params: SpaceParamsDto,
@Query() query: GetAqiPollutantBySpaceDto,
): Promise<BaseResponseDto> {
return await this.aqiService.getAQIDistributionDataBySpace(params, query);
}
}

View File

@ -1 +0,0 @@
export * from './aqi.controller';

View File

@ -1,7 +0,0 @@
import { IsNotEmpty, IsUUID } from 'class-validator';
export class SpaceParamsDto {
@IsUUID('4', { message: 'Invalid UUID format' })
@IsNotEmpty()
spaceUuid: string;
}

View File

@ -1,37 +0,0 @@
import { PollutantType } from '@app/common/constants/pollutants.enum';
import { ApiProperty } from '@nestjs/swagger';
import { Matches, IsNotEmpty, IsString } from 'class-validator';
export class GetAqiDailyBySpaceDto {
@ApiProperty({
description: 'Month and year in format YYYY-MM',
example: '2025-03',
required: true,
})
@Matches(/^\d{4}-(0[1-9]|1[0-2])$/, {
message: 'monthDate must be in YYYY-MM format',
})
@IsNotEmpty()
monthDate: string;
}
export class GetAqiPollutantBySpaceDto {
@ApiProperty({
description: 'Pollutant Type',
enum: PollutantType,
example: PollutantType.AQI,
required: true,
})
@IsString()
@IsNotEmpty()
public pollutantType: string;
@ApiProperty({
description: 'Month and year in format YYYY-MM',
example: '2025-03',
required: true,
})
@Matches(/^\d{4}-(0[1-9]|1[0-2])$/, {
message: 'monthDate must be in YYYY-MM format',
})
@IsNotEmpty()
monthDate: string;
}

View File

@ -1,138 +0,0 @@
import { HttpException, HttpStatus, Injectable } from '@nestjs/common';
import {
GetAqiDailyBySpaceDto,
GetAqiPollutantBySpaceDto,
} from '../dto/get-aqi.dto';
import { SuccessResponseDto } from '@app/common/dto/success.response.dto';
import { SpaceParamsDto } from '../dto/aqi-params.dto';
import { SqlLoaderService } from '@app/common/helper/services/sql-loader.service';
import { DataSource } from 'typeorm';
import { SQL_PROCEDURES_PATH } from '@app/common/constants/sql-query-path';
import { BaseResponseDto } from '@app/common/dto/base.response.dto';
import { convertKeysToCamelCase } from '@app/common/helper/camelCaseConverter';
import { PollutantType } from '@app/common/constants/pollutants.enum';
@Injectable()
export class AqiService {
constructor(
private readonly sqlLoader: SqlLoaderService,
private readonly dataSource: DataSource,
) {}
async getAQIDistributionDataBySpace(
params: SpaceParamsDto,
query: GetAqiPollutantBySpaceDto,
): Promise<BaseResponseDto> {
const { monthDate, pollutantType } = query;
const { spaceUuid } = params;
try {
const data = await this.executeProcedure(
'fact_daily_space_aqi',
'proceduce_select_daily_space_aqi',
[spaceUuid, monthDate],
);
const categories = [
'good',
'moderate',
'unhealthy_sensitive',
'unhealthy',
'very_unhealthy',
'hazardous',
];
const transformedData = data.map((item) => {
const date = new Date(item.event_date).toLocaleDateString('en-CA'); // YYYY-MM-DD
const categoryData = categories.map((category) => {
const key = `${category}_${pollutantType.toLowerCase()}_percentage`;
return {
type: category,
percentage: item[key] ?? 0,
};
});
return { date, data: categoryData };
});
const response = this.buildResponse(
`AQI distribution data fetched successfully for ${spaceUuid} space and pollutant ${pollutantType}`,
transformedData,
);
return response;
} catch (error) {
console.error('Failed to fetch AQI distribution data', {
error,
spaceUuid,
});
throw new HttpException(
error.response?.message || 'Failed to fetch AQI distribution data',
error.status || HttpStatus.INTERNAL_SERVER_ERROR,
);
}
}
async getAQIRangeDataBySpace(
params: SpaceParamsDto,
query: GetAqiDailyBySpaceDto,
): Promise<BaseResponseDto> {
const { monthDate } = query;
const { spaceUuid } = params;
try {
const data = await this.executeProcedure(
'fact_daily_space_aqi',
'proceduce_select_daily_space_aqi',
[spaceUuid, monthDate],
);
// Define pollutants dynamically
const pollutants = Object.values(PollutantType);
const transformedData = data.map((item) => {
const date = new Date(item.event_date).toLocaleDateString('en-CA'); // YYYY-MM-DD
const dailyData = pollutants.map((type) => ({
type,
min: item[`daily_min_${type}`],
max: item[`daily_max_${type}`],
average: item[`daily_avg_${type}`],
}));
return { date, data: dailyData };
});
const response = this.buildResponse(
`AQI data fetched successfully for ${spaceUuid} space`,
transformedData,
);
return convertKeysToCamelCase(response);
} catch (error) {
console.error('Failed to fetch AQI data', {
error,
spaceUuid,
});
throw new HttpException(
error.response?.message || 'Failed to fetch AQI data',
error.status || HttpStatus.INTERNAL_SERVER_ERROR,
);
}
}
private buildResponse(message: string, data: any[]) {
return new SuccessResponseDto({
message,
data,
statusCode: HttpStatus.OK,
});
}
private async executeProcedure(
procedureFolderName: string,
procedureFileName: string,
params: (string | number | null)[],
): Promise<any[]> {
const query = this.loadQuery(procedureFolderName, procedureFileName);
return await this.dataSource.query(query, params);
}
private loadQuery(folderName: string, fileName: string): string {
return this.sqlLoader.loadQuery(folderName, fileName, SQL_PROCEDURES_PATH);
}
}
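
For reference, a minimal TypeScript sketch of the row-to-response mapping performed in getAQIRangeDataBySpace above, using one hypothetical procedure row and a reduced pollutant list:

// One row as returned by the stored procedure (hypothetical values).
const row: Record<string, any> = {
  event_date: '2025-03-01',
  daily_min_aqi: 18.2, daily_avg_aqi: 35.6, daily_max_aqi: 72.1,
  daily_min_pm25: 5.1, daily_avg_pm25: 11.4, daily_max_pm25: 29.8,
};

const pollutants = ['aqi', 'pm25']; // subset of PollutantType, for the example

const transformed = {
  date: new Date(row.event_date).toLocaleDateString('en-CA'), // YYYY-MM-DD
  data: pollutants.map((type) => ({
    type,
    min: row[`daily_min_${type}`],
    max: row[`daily_max_${type}`],
    average: row[`daily_avg_${type}`],
  })),
};
// e.g. { date: '2025-03-01', data: [{ type: 'aqi', min: 18.2, ... }, { type: 'pm25', ... }] }
// (the exact date string depends on the server timezone, as in the service itself)
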

View File

@ -1 +0,0 @@
export * from './aqi.service';

View File

@ -1,25 +1,25 @@
import { RoleType } from '@app/common/constants/role.type.enum';
import { differenceInSeconds } from '@app/common/helper/differenceInSeconds';
import { UserRepository } from '../../../libs/common/src/modules/user/repositories';
import {
BadRequestException,
ForbiddenException,
Injectable,
} from '@nestjs/common';
import { ConfigService } from '@nestjs/config';
import * as argon2 from 'argon2';
import { RoleService } from 'src/role/services';
import { LessThan, MoreThan } from 'typeorm';
import { AuthService } from '../../../libs/common/src/auth/services/auth.service';
import { OtpType } from '../../../libs/common/src/constants/otp-type.enum';
import { HelperHashService } from '../../../libs/common/src/helper/services';
import { UserSessionRepository } from '../../../libs/common/src/modules/session/repositories/session.repository';
import { UserEntity } from '../../../libs/common/src/modules/user/entities/user.entity';
import { UserRepository } from '../../../libs/common/src/modules/user/repositories';
import { UserOtpRepository } from '../../../libs/common/src/modules/user/repositories/user.repository';
import { EmailService } from '../../../libs/common/src/util/email.service';
import { ForgetPasswordDto, UserOtpDto, VerifyOtpDto } from '../dtos';
import { UserSignUpDto } from '../dtos/user-auth.dto';
import { HelperHashService } from '../../../libs/common/src/helper/services';
import { UserLoginDto } from '../dtos/user-login.dto';
import { AuthService } from '../../../libs/common/src/auth/services/auth.service';
import { UserSessionRepository } from '../../../libs/common/src/modules/session/repositories/session.repository';
import { UserOtpRepository } from '../../../libs/common/src/modules/user/repositories/user.repository';
import { ForgetPasswordDto, UserOtpDto, VerifyOtpDto } from '../dtos';
import { EmailService } from '../../../libs/common/src/util/email.service';
import { OtpType } from '../../../libs/common/src/constants/otp-type.enum';
import { UserEntity } from '../../../libs/common/src/modules/user/entities/user.entity';
import * as argon2 from 'argon2';
import { differenceInSeconds } from '@app/common/helper/differenceInSeconds';
import { LessThan, MoreThan } from 'typeorm';
import { ConfigService } from '@nestjs/config';
import { RoleService } from 'src/role/services';
import { RoleType } from '@app/common/constants/role.type.enum';
@Injectable()
export class UserAuthService {
@ -108,7 +108,7 @@ export class UserAuthService {
async userLogin(data: UserLoginDto) {
try {
let user: Omit<UserEntity, 'password'>;
let user;
if (data.googleCode) {
const googleUserData = await this.authService.login({
googleCode: data.googleCode,
@ -145,7 +145,7 @@ export class UserAuthService {
}
const session = await Promise.all([
await this.sessionRepository.update(
{ userId: user?.['id'] },
{ userId: user.id },
{
isLoggedOut: true,
},
@ -166,7 +166,6 @@ export class UserAuthService {
hasAcceptedAppAgreement: user.hasAcceptedAppAgreement,
project: user.project,
sessionId: session[1].uuid,
bookingPoints: user.bookingPoints,
});
return res;
} catch (error) {
@ -348,7 +347,6 @@ export class UserAuthService {
userId: user.uuid,
uuid: user.uuid,
type,
bookingPoints: user.bookingPoints,
sessionId,
});
await this.authService.updateRefreshToken(user.uuid, tokens.refreshToken);

View File

@ -1,29 +0,0 @@
import { BookingRepositoryModule } from '@app/common/modules/booking/booking.repository.module';
import { BookableSpaceEntityRepository } from '@app/common/modules/booking/repositories/bookable-space.repository';
import { BookingEntityRepository } from '@app/common/modules/booking/repositories/booking.repository';
import { SpaceRepository } from '@app/common/modules/space';
import { UserRepository } from '@app/common/modules/user/repositories';
import { EmailService } from '@app/common/util/email.service';
import { Global, Module } from '@nestjs/common';
import { BookableSpaceController } from './controllers/bookable-space.controller';
import { BookingController } from './controllers/booking.controller';
import { BookableSpaceService } from './services/bookable-space.service';
import { BookingService } from './services/booking.service';
@Global()
@Module({
imports: [BookingRepositoryModule],
controllers: [BookableSpaceController, BookingController],
providers: [
BookableSpaceService,
BookingService,
EmailService,
BookableSpaceEntityRepository,
BookingEntityRepository,
SpaceRepository,
UserRepository,
],
exports: [BookableSpaceService, BookingService],
})
export class BookingModule {}

View File

@ -1,107 +0,0 @@
import { ControllerRoute } from '@app/common/constants/controller-route';
import { EnableDisableStatusEnum } from '@app/common/constants/days.enum';
import { BaseResponseDto } from '@app/common/dto/base.response.dto';
import { JwtAuthGuard } from '@app/common/guards/jwt.auth.guard';
import {
Body,
Controller,
Get,
Param,
ParseUUIDPipe,
Post,
Put,
Query,
Req,
UseGuards,
} from '@nestjs/common';
import { ApiBearerAuth, ApiOperation, ApiTags } from '@nestjs/swagger';
import { PageResponse } from '@app/common/dto/pagination.response.dto';
import { SuccessResponseDto } from '@app/common/dto/success.response.dto';
import { plainToInstance } from 'class-transformer';
import { BookableSpaceRequestDto } from '../dtos/bookable-space-request.dto';
import { BookableSpaceResponseDto } from '../dtos/bookable-space-response.dto';
import { CreateBookableSpaceDto } from '../dtos/create-bookable-space.dto';
import { UpdateBookableSpaceDto } from '../dtos/update-bookable-space.dto';
import { BookableSpaceService } from '../services/bookable-space.service';
@ApiTags('Booking Module')
@Controller({
version: EnableDisableStatusEnum.ENABLED,
path: ControllerRoute.BOOKABLE_SPACES.ROUTE,
})
export class BookableSpaceController {
constructor(private readonly bookableSpaceService: BookableSpaceService) {}
@ApiBearerAuth()
@UseGuards(JwtAuthGuard)
@Post()
@ApiOperation({
summary:
ControllerRoute.BOOKABLE_SPACES.ACTIONS.ADD_BOOKABLE_SPACES_SUMMARY,
description:
ControllerRoute.BOOKABLE_SPACES.ACTIONS.ADD_BOOKABLE_SPACES_DESCRIPTION,
})
async create(@Body() dto: CreateBookableSpaceDto): Promise<BaseResponseDto> {
const result = await this.bookableSpaceService.create(dto);
return new SuccessResponseDto({
data: result,
message: 'Successfully created bookable spaces',
});
}
@ApiBearerAuth()
@UseGuards(JwtAuthGuard)
@Get()
@ApiOperation({
summary:
ControllerRoute.BOOKABLE_SPACES.ACTIONS.GET_ALL_BOOKABLE_SPACES_SUMMARY,
description:
ControllerRoute.BOOKABLE_SPACES.ACTIONS
.GET_ALL_BOOKABLE_SPACES_DESCRIPTION,
})
async findAll(
@Query() query: BookableSpaceRequestDto,
@Req() req: Request,
): Promise<PageResponse<BookableSpaceResponseDto>> {
const project = req['user']?.project?.uuid;
if (!project) {
throw new Error('Project UUID is required in the request');
}
const { data, pagination } = await this.bookableSpaceService.findAll(
query,
project,
);
return new PageResponse<BookableSpaceResponseDto>(
{
data: data.map((space) =>
plainToInstance(BookableSpaceResponseDto, space, {
excludeExtraneousValues: true,
}),
),
message: 'Successfully fetched all bookable spaces',
},
pagination,
);
}
@ApiBearerAuth()
@UseGuards(JwtAuthGuard)
@Put(':spaceUuid')
@ApiOperation({
summary:
ControllerRoute.BOOKABLE_SPACES.ACTIONS.UPDATE_BOOKABLE_SPACES_SUMMARY,
description:
ControllerRoute.BOOKABLE_SPACES.ACTIONS
.UPDATE_BOOKABLE_SPACES_DESCRIPTION,
})
async update(
@Param('spaceUuid', ParseUUIDPipe) spaceUuid: string,
@Body() dto: UpdateBookableSpaceDto,
): Promise<BaseResponseDto> {
const result = await this.bookableSpaceService.update(spaceUuid, dto);
return new SuccessResponseDto({
data: result,
message: 'Successfully updated bookable spaces',
});
}
}

Some files were not shown because too many files have changed in this diff.