Mirror of https://github.com/SyncrowIOT/backend.git (synced 2025-11-26 17:54:54 +00:00)

Compare commits: SP-1778-be...DATA-occup (1 commit)

| Author | SHA1 | Date |
|---|---|---|
|  | 6613b49fc0 |  |
@@ -125,7 +125,7 @@ import { VisitorPasswordEntity } from '../modules/visitor-password/entities';
       logger: typeOrmLogger,
       extra: {
         charset: 'utf8mb4',
-        max: 50, // set pool max size
+        max: 20, // set pool max size
         idleTimeoutMillis: 5000, // close idle clients after 5 second
         connectionTimeoutMillis: 12_000, // return an error after 11 second if connection could not be established
         maxUses: 7500, // close (and replace) a connection after it has been used 7500 times (see below for discussion)
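The `extra` block above is handed straight to the underlying node-postgres pool. A minimal sketch of how these options typically sit inside a NestJS TypeORM configuration (surrounding property names other than those shown in the hunk are assumptions):

```typescript
import { TypeOrmModuleOptions } from '@nestjs/typeorm';

// Sketch only: pool tuning options forwarded to node-postgres (pg.Pool).
export const dbConfig: TypeOrmModuleOptions = {
  type: 'postgres',
  extra: {
    charset: 'utf8mb4',
    max: 20, // upper bound on connections held by the pool
    idleTimeoutMillis: 5000, // close clients idle for more than 5 seconds
    connectionTimeoutMillis: 12_000, // fail a checkout that waits longer than 12 seconds
    maxUses: 7500, // recycle a connection after 7500 uses
  },
};
```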
@@ -24,7 +24,6 @@ import { PowerClampEnergyEnum } from '@app/common/constants/power.clamp.enargy.e
 import { PresenceSensorEnum } from '@app/common/constants/presence.sensor.enum';
 import { OccupancyService } from '@app/common/helper/services/occupancy.service';
 import { AqiDataService } from '@app/common/helper/services/aqi.data.service';
-import { DataSource, QueryRunner } from 'typeorm';
 @Injectable()
 export class DeviceStatusFirebaseService {
   private tuya: TuyaContext;
@@ -36,7 +35,6 @@ export class DeviceStatusFirebaseService {
     private readonly occupancyService: OccupancyService,
     private readonly aqiDataService: AqiDataService,
     private deviceStatusLogRepository: DeviceStatusLogRepository,
-    private readonly dataSource: DataSource,
   ) {
     const accessKey = this.configService.get<string>('auth-config.ACCESS_KEY');
     const secretKey = this.configService.get<string>('auth-config.SECRET_KEY');
@@ -81,46 +79,28 @@ export class DeviceStatusFirebaseService {
   async addDeviceStatusToFirebase(
     addDeviceStatusDto: AddDeviceStatusDto,
   ): Promise<AddDeviceStatusDto | null> {
-    const queryRunner = this.dataSource.createQueryRunner();
-    await queryRunner.connect();
-    await queryRunner.startTransaction();
     try {
       const device = await this.getDeviceByDeviceTuyaUuid(
         addDeviceStatusDto.deviceTuyaUuid,
-        queryRunner,
       );

       if (device?.uuid) {
-        const result = await this.createDeviceStatusFirebase(
-          {
-            deviceUuid: device.uuid,
-            ...addDeviceStatusDto,
-            productType: device.productDevice.prodType,
-          },
-          queryRunner,
-        );
-        await queryRunner.commitTransaction();
-        return result;
+        return await this.createDeviceStatusFirebase({
+          deviceUuid: device.uuid,
+          ...addDeviceStatusDto,
+          productType: device.productDevice.prodType,
+        });
       }
       // Return null if device not found or no UUID
-      await queryRunner.rollbackTransaction();
       return null;
     } catch (error) {
-      await queryRunner.rollbackTransaction();
       // Handle the error silently, perhaps log it internally or ignore it
       return null;
-    } finally {
-      await queryRunner.release();
     }
   }
-  async getDeviceByDeviceTuyaUuid(
-    deviceTuyaUuid: string,
-    queryRunner?: QueryRunner,
-  ) {
-    const repo = queryRunner
-      ? queryRunner.manager.getRepository(this.deviceRepository.target)
-      : this.deviceRepository;
-
-    return await repo.findOne({
+  async getDeviceByDeviceTuyaUuid(deviceTuyaUuid: string) {
+    return await this.deviceRepository.findOne({
       where: {
         deviceTuyaUuid,
         isActive: true,
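The QueryRunner variant in this hunk follows TypeORM's standard manual-transaction lifecycle. A minimal standalone sketch of that pattern (the wrapped work is illustrative, not code from the repo):

```typescript
import { DataSource } from 'typeorm';

// Sketch of TypeORM's QueryRunner transaction lifecycle.
async function runInTransaction(dataSource: DataSource): Promise<void> {
  const queryRunner = dataSource.createQueryRunner();
  await queryRunner.connect();
  await queryRunner.startTransaction();
  try {
    // All reads/writes that must be atomic go through queryRunner.manager.
    await queryRunner.manager.query('SELECT 1');
    await queryRunner.commitTransaction();
  } catch (error) {
    await queryRunner.rollbackTransaction();
    throw error;
  } finally {
    // Always release the runner so the connection returns to the pool.
    await queryRunner.release();
  }
}
```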
@@ -128,7 +108,6 @@ export class DeviceStatusFirebaseService {
       relations: ['productDevice'],
     });
   }
-
   async getDevicesInstructionStatus(deviceUuid: string) {
     try {
       const deviceDetails = await this.getDeviceByDeviceUuid(deviceUuid);
@@ -174,14 +153,9 @@
   }
   async getDeviceByDeviceUuid(
     deviceUuid: string,
-    withProductDevice = true,
-    queryRunner?: QueryRunner,
+    withProductDevice: boolean = true,
   ) {
-    const repo = queryRunner
-      ? queryRunner.manager.getRepository(this.deviceRepository.target)
-      : this.deviceRepository;
-
-    return await repo.findOne({
+    return await this.deviceRepository.findOne({
       where: {
         uuid: deviceUuid,
         isActive: true,
@@ -189,20 +163,21 @@
       ...(withProductDevice && { relations: ['productDevice'] }),
     });
   }

   async createDeviceStatusFirebase(
     addDeviceStatusDto: AddDeviceStatusDto,
-    queryRunner?: QueryRunner,
   ): Promise<any> {
     const dataRef = ref(
       this.firebaseDb,
       `device-status/${addDeviceStatusDto.deviceUuid}`,
     );

-    // Step 1: Update Firebase Realtime Database
+    // Use a transaction to handle concurrent updates
     await runTransaction(dataRef, (existingData) => {
-      if (!existingData) existingData = {};
+      if (!existingData) {
+        existingData = {};
+      }

       // Assign default values if fields are not present
       if (!existingData.deviceTuyaUuid) {
         existingData.deviceTuyaUuid = addDeviceStatusDto.deviceTuyaUuid;
       }
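Both sides of this hunk rely on the Firebase Realtime Database `runTransaction` helper, which retries the update callback until it can be applied atomically against the current node value. A minimal standalone sketch (the database URL, path, and field are placeholders, not values from the repo):

```typescript
import { initializeApp } from 'firebase/app';
import { getDatabase, ref, runTransaction } from 'firebase/database';

// Placeholder config; the real project loads this from its own configuration.
const app = initializeApp({ databaseURL: 'https://example-project.firebaseio.com' });
const db = getDatabase(app);

// Atomically merge a field into a device-status node.
async function touchDeviceStatus(deviceUuid: string): Promise<void> {
  const dataRef = ref(db, `device-status/${deviceUuid}`);
  await runTransaction(dataRef, (existingData) => {
    // The callback may run several times; always start from existingData.
    const data = existingData ?? {};
    data.lastSeen = Date.now();
    return data; // returning undefined would abort the transaction
  });
}
```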
@@ -216,15 +191,18 @@
         existingData.status = [];
       }

       // Merge incoming status with existing status
       // Create a map to track existing status codes
       const statusMap = new Map(
         existingData.status.map((item) => [item.code, item.value]),
       );

       // Update or add status codes

       for (const statusItem of addDeviceStatusDto.status) {
         statusMap.set(statusItem.code, statusItem.value);
       }

       // Convert the map back to an array format
       existingData.status = Array.from(statusMap, ([code, value]) => ({
         code,
         value,
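The merge above deduplicates status entries by `code`, letting the most recent value win. The same idea as a small standalone function (a sketch; `StatusItem` is an assumed shape, not a type from the repo):

```typescript
// Assumed shape of a single datapoint status entry.
interface StatusItem {
  code: string;
  value: unknown;
}

// Merge incoming status items into the existing list, keyed by code.
// Later values for the same code overwrite earlier ones.
function mergeStatus(existing: StatusItem[], incoming: StatusItem[]): StatusItem[] {
  const statusMap = new Map<string, unknown>(
    existing.map((item) => [item.code, item.value]),
  );
  for (const item of incoming) {
    statusMap.set(item.code, item.value);
  }
  return Array.from(statusMap, ([code, value]) => ({ code, value }));
}

// Example: 'switch_1' is updated while 'temp' is preserved.
// mergeStatus(
//   [{ code: 'switch_1', value: false }, { code: 'temp', value: 22 }],
//   [{ code: 'switch_1', value: true }],
// );
```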
@@ -233,9 +211,9 @@
       return existingData;
     });

-    // Step 2: Save device status log entries
-    const newLogs = addDeviceStatusDto.log.properties.map((property) =>
-      this.deviceStatusLogRepository.create({
+    // Save logs to your repository
+    const newLogs = addDeviceStatusDto.log.properties.map((property) => {
+      return this.deviceStatusLogRepository.create({
         deviceId: addDeviceStatusDto.deviceUuid,
         deviceTuyaId: addDeviceStatusDto.deviceTuyaUuid,
         productId: addDeviceStatusDto.log.productId,
@@ -244,19 +222,10 @@
         value: property.value,
         eventId: addDeviceStatusDto.log.dataId,
         eventTime: new Date(property.time).toISOString(),
-      }),
-    );
+      });
+    });
+    await this.deviceStatusLogRepository.save(newLogs);

-    if (queryRunner) {
-      const repo = queryRunner.manager.getRepository(
-        this.deviceStatusLogRepository.target,
-      );
-      await repo.save(newLogs);
-    } else {
-      await this.deviceStatusLogRepository.save(newLogs);
-    }
-
-    // Step 3: Trigger additional data services
     if (addDeviceStatusDto.productType === ProductType.PC) {
       const energyCodes = new Set([
         PowerClampEnergyEnum.ENERGY_CONSUMED,
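The `queryRunner.manager.getRepository(...target)` call used on one side of this hunk is the standard way to make a repository participate in an ambient transaction. A small sketch of that pattern (the helper name is illustrative):

```typescript
import { ObjectLiteral, QueryRunner, Repository } from 'typeorm';

// Sketch: use the repository bound to an ambient transaction when a
// QueryRunner is supplied, otherwise the plain injected repository.
function resolveRepository<T extends ObjectLiteral>(
  defaultRepo: Repository<T>,
  queryRunner?: QueryRunner,
): Repository<T> {
  return queryRunner
    ? queryRunner.manager.getRepository<T>(defaultRepo.target)
    : defaultRepo;
}
```

Entities saved through the resolved repository are then committed or rolled back together with the rest of the transaction.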
@@ -300,8 +269,7 @@
         addDeviceStatusDto.deviceUuid,
       );
     }
-
-    // Step 4: Return updated Firebase status
+    // Return the updated data
     const snapshot: DataSnapshot = await get(dataRef);
     return snapshot.val();
   }
@@ -36,18 +36,9 @@ export class AqiDataService {
     procedureFileName: string,
     params: (string | number | null)[],
   ): Promise<void> {
-    const queryRunner = this.dataSource.createQueryRunner();
-    await queryRunner.connect();
-    try {
-      const query = this.loadQuery(procedureFolderName, procedureFileName);
-      await queryRunner.query(query, params);
-      console.log(`Procedure ${procedureFileName} executed successfully.`);
-    } catch (err) {
-      console.error(`Failed to execute procedure ${procedureFileName}:`, err);
-      throw err;
-    } finally {
-      await queryRunner.release();
-    }
+    const query = this.loadQuery(procedureFolderName, procedureFileName);
+    await this.dataSource.query(query, params);
+    console.log(`Procedure ${procedureFileName} executed successfully.`);
   }

   private loadQuery(folderName: string, fileName: string): string {
@@ -57,18 +57,9 @@ export class OccupancyService {
     procedureFileName: string,
     params: (string | number | null)[],
   ): Promise<void> {
-    const queryRunner = this.dataSource.createQueryRunner();
-    await queryRunner.connect();
-    try {
-      const query = this.loadQuery(procedureFolderName, procedureFileName);
-      await queryRunner.query(query, params);
-      console.log(`Procedure ${procedureFileName} executed successfully.`);
-    } catch (err) {
-      console.error(`Failed to execute procedure ${procedureFileName}:`, err);
-      throw err;
-    } finally {
-      await queryRunner.release();
-    }
+    const query = this.loadQuery(procedureFolderName, procedureFileName);
+    await this.dataSource.query(query, params);
+    console.log(`Procedure ${procedureFileName} executed successfully.`);
   }

   private loadQuery(folderName: string, fileName: string): string {
@@ -46,21 +46,12 @@ export class PowerClampService {
     procedureFileName: string,
     params: (string | number | null)[],
   ): Promise<void> {
-    const queryRunner = this.dataSource.createQueryRunner();
-    await queryRunner.connect();
-    try {
-      const query = this.loadQuery(
-        'fact_device_energy_consumed',
-        procedureFileName,
-      );
-      await queryRunner.query(query, params);
-      console.log(`Procedure ${procedureFileName} executed successfully.`);
-    } catch (err) {
-      console.error(`Failed to execute procedure ${procedureFileName}:`, err);
-      throw err;
-    } finally {
-      await queryRunner.release();
-    }
+    const query = this.loadQuery(
+      'fact_device_energy_consumed',
+      procedureFileName,
+    );
+    await this.dataSource.query(query, params);
+    console.log(`Procedure ${procedureFileName} executed successfully.`);
   }

   private loadQuery(folderName: string, fileName: string): string {
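All three helper services above (AQI, occupancy, power clamp) run a SQL file loaded from disk with positional parameters. A minimal sketch of the simpler `dataSource.query` form (the folder layout and file name below are assumptions, not paths from the repo):

```typescript
import { readFileSync } from 'fs';
import { join } from 'path';
import { DataSource } from 'typeorm';

// Sketch: load a .sql file and execute it with positional ($1, $2, ...) params.
async function executeProcedure(
  dataSource: DataSource,
  folderName: string,
  fileName: string,
  params: (string | number | null)[],
): Promise<void> {
  const query = readFileSync(
    join(__dirname, 'queries', folderName, `${fileName}.sql`), // illustrative layout
    'utf8',
  );
  // dataSource.query checks a connection out of the pool and returns it
  // automatically, so no QueryRunner bookkeeping is needed for a one-off call.
  await dataSource.query(query, params);
}
```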
@@ -1,3 +1,4 @@
+-- 1. Load all presence-related logs
 WITH device_logs AS (
     SELECT
         device.uuid AS device_id,
@@ -15,7 +16,7 @@ WITH device_logs AS (
         AND "device-status-log".code = 'presence_state'
 ),

--- 1. All 'none' → presence or motion
+-- 2. Standard transitions: 'none' → 'motion' or 'presence'
 presence_transitions AS (
     SELECT
         space_id,
@@ -23,10 +24,31 @@ presence_transitions AS (
         event_time::date AS event_date,
         value
     FROM device_logs
-    WHERE (value = 'motion' OR value = 'presence') AND prev_value = 'none'
+    WHERE value IN ('motion', 'presence') AND prev_value = 'none'
 ),

--- 2. Cluster events per space_id within 30s
+-- 3. Fallback: days with 'motion' or 'presence' but no 'none'
+fallback_daily_presence AS (
+    SELECT
+        space_id,
+        event_time::date AS event_date,
+        MIN(event_time) AS event_time,
+        'presence'::text AS value
+    FROM device_logs
+    WHERE value IN ('motion', 'presence', 'none')
+    GROUP BY space_id, event_time::date
+    HAVING BOOL_OR(value = 'motion') OR BOOL_OR(value = 'presence')
+       AND NOT BOOL_OR(value = 'none')
+),
+
+-- 4. Merge both detection sources
+all_presence_events AS (
+    SELECT * FROM presence_transitions
+    UNION ALL
+    SELECT space_id, event_time, event_date, value FROM fallback_daily_presence
+),
+
+-- 5. Cluster events per space_id within 30 seconds
 clustered_events AS (
     SELECT
         space_id,
@@ -40,11 +62,11 @@ clustered_events AS (
             WHEN event_time - LAG(event_time) OVER (PARTITION BY space_id ORDER BY event_time) > INTERVAL '30 seconds'
             THEN 1 ELSE 0
             END AS new_cluster_flag
-        FROM presence_transitions
+        FROM all_presence_events
     ) marked
 ),

--- 3. Determine dominant type (motion vs presence) per cluster
+-- 6. Determine dominant type (motion vs presence) per cluster
 cluster_type AS (
     SELECT
         space_id,
@@ -60,7 +82,7 @@ cluster_type AS (
     GROUP BY space_id, event_date, cluster_id
 ),

--- 4. Count clusters by dominant type
+-- 7. Count clusters by dominant type
 summary AS (
     SELECT
         space_id,
@@ -70,15 +92,16 @@ summary AS (
         COUNT(*) AS count_total_presence_detected
     FROM cluster_type
     GROUP BY space_id, event_date
 ),

+-- 8. Prepare final result
+final_table AS (
+    SELECT *
+    FROM summary
+    ORDER BY space_id, event_date
+)

--- 5. Output
-, final_table as (
-    SELECT *
-    FROM summary
-    ORDER BY space_id, event_date)
-
-
+-- 9. Insert or upsert into the destination table
 INSERT INTO public."presence-sensor-daily-space-detection" (
     space_uuid,
     event_date,
@@ -97,4 +120,4 @@ ON CONFLICT (space_uuid, event_date) DO UPDATE
 SET
     count_motion_detected = EXCLUDED.count_motion_detected,
     count_presence_detected = EXCLUDED.count_presence_detected,
-    count_total_presence_detected = EXCLUDED.count_total_presence_detected;
+    count_total_presence_detected = EXCLUDED.count_total_presence_detected;
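The clustering CTE in this procedure groups presence events into sessions whenever two events in the same space are more than 30 seconds apart (a gaps-and-islands pattern). The same logic expressed as a small TypeScript helper, for intuition (a sketch; the event shape is assumed, and events must be sorted by time per space just as the SQL's `ORDER BY event_time` requires):

```typescript
interface PresenceEvent {
  spaceId: string;
  eventTime: Date; // assumed sorted ascending within each space
}

// Count presence "clusters": a new cluster starts when the gap to the
// previous event in the same space exceeds 30 seconds.
function countClusters(events: PresenceEvent[], gapMs = 30_000): Map<string, number> {
  const lastSeen = new Map<string, number>();
  const clusters = new Map<string, number>();
  for (const event of events) {
    const t = event.eventTime.getTime();
    const prev = lastSeen.get(event.spaceId);
    if (prev === undefined || t - prev > gapMs) {
      clusters.set(event.spaceId, (clusters.get(event.spaceId) ?? 0) + 1);
    }
    lastSeen.set(event.spaceId, t);
  }
  return clusters;
}
```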
@@ -4,6 +4,7 @@ WITH params AS (
     $2::uuid AS space_id
 ),

+-- 1. Load logs
 device_logs AS (
     SELECT
         device.uuid AS device_id,
@@ -21,7 +22,7 @@ device_logs AS (
         AND "device-status-log".code = 'presence_state'
 ),

--- 1. All 'none' → presence or motion
+-- 2. Transitions from 'none' → motion/presence
 presence_transitions AS (
     SELECT
         space_id,
@@ -29,10 +30,30 @@ presence_transitions AS (
         event_time::date AS event_date,
         value
     FROM device_logs
-    WHERE (value = 'motion' OR value = 'presence') AND prev_value = 'none'
+    WHERE value IN ('motion', 'presence') AND prev_value = 'none'
 ),

--- 2. Cluster events per space_id within 30s
+-- 3. Fallback: days with motion/presence but no 'none'
+fallback_daily_presence AS (
+    SELECT
+        space_id,
+        event_time::date AS event_date,
+        MIN(event_time) AS event_time,
+        'presence'::text AS value
+    FROM device_logs
+    GROUP BY space_id, event_time::date
+    HAVING BOOL_OR(value = 'motion') OR BOOL_OR(value = 'presence')
+       AND NOT BOOL_OR(value = 'none')
+),
+
+-- 4. Combine standard and fallback detections
+all_presence_events AS (
+    SELECT * FROM presence_transitions
+    UNION ALL
+    SELECT * FROM fallback_daily_presence
+),
+
+-- 5. Cluster detections (within 30s)
 clustered_events AS (
     SELECT
         space_id,
@@ -46,11 +67,11 @@ clustered_events AS (
             WHEN event_time - LAG(event_time) OVER (PARTITION BY space_id ORDER BY event_time) > INTERVAL '30 seconds'
             THEN 1 ELSE 0
             END AS new_cluster_flag
-        FROM presence_transitions
+        FROM all_presence_events
     ) marked
 ),

--- 3. Determine dominant type (motion vs presence) per cluster
+-- 6. Dominant type per cluster
 cluster_type AS (
     SELECT
         space_id,
@@ -66,7 +87,7 @@ cluster_type AS (
     GROUP BY space_id, event_date, cluster_id
 ),

--- 4. Count clusters by dominant type
+-- 7. Count presence by type
 summary AS (
     SELECT
         space_id,
@@ -76,22 +97,22 @@ summary AS (
         COUNT(*) AS count_total_presence_detected
     FROM cluster_type
     GROUP BY space_id, event_date
 ),

+-- 8. Filter by params and return final table
+final_table AS (
+    SELECT
+        summary.space_id,
+        summary.event_date,
+        count_motion_detected,
+        count_presence_detected,
+        count_total_presence_detected
+    FROM summary
+    JOIN params p ON summary.space_id = p.space_id
+    WHERE p.event_date IS NULL OR summary.event_date = p.event_date
+)

--- 5. Output
-, final_table as (
-    SELECT summary.space_id,
-        summary.event_date,
-        count_motion_detected,
-        count_presence_detected,
-        count_total_presence_detected
-    FROM summary
-    JOIN params P ON true
-    where summary.space_id = P.space_id
-    and (P.event_date IS NULL or summary.event_date::date = P.event_date)
-    ORDER BY space_id, event_date)
-
-
+-- 9. Insert or upsert into the table
 INSERT INTO public."presence-sensor-daily-space-detection" (
     space_uuid,
     event_date,
@@ -30,6 +30,19 @@ presence_detection AS (
     FROM device_logs
 ),

+fallback_daily_presence AS (
+    SELECT
+        space_id,
+        event_time::date AS event_date,
+        0 AS event_hour,
+        COUNT(*) > 0 AS has_presence,
+        BOOL_OR(value = 'none') AS has_none
+    FROM device_logs
+    WHERE value IN ('motion', 'presence', 'none')
+    GROUP BY space_id, event_time::date
+    HAVING COUNT(*) > 0 AND NOT BOOL_OR(value = 'none')
+),
+
 space_level_presence_events AS (
     SELECT DISTINCT
         pd.space_id,
@@ -38,6 +51,15 @@ space_level_presence_events AS (
         pd.event_time
     FROM presence_detection pd
     WHERE presence_started = 1
+
+    UNION
+
+    SELECT
+        fdp.space_id,
+        fdp.event_date,
+        fdp.event_hour,
+        NULL::timestamp AS event_time
+    FROM fallback_daily_presence fdp
 ),

 space_level_presence_summary AS (
@@ -77,3 +99,4 @@ LEFT JOIN space_level_presence_summary pds
 ORDER BY space_id, event_date, event_hour;
@@ -190,26 +190,24 @@ export class CommunityService {
       .distinct(true);

     if (includeSpaces) {
-      qb.leftJoinAndSelect(
-        'c.spaces',
-        'space',
-        'space.disabled = :disabled AND space.spaceName != :orphanSpaceName',
-        { disabled: false, orphanSpaceName: ORPHAN_SPACE_NAME },
-      )
+      qb.leftJoinAndSelect('c.spaces', 'space', 'space.disabled = false')
         .leftJoinAndSelect('space.parent', 'parent')
         .leftJoinAndSelect(
           'space.children',
           'children',
           'children.disabled = :disabled',
           { disabled: false },
-        );
-      // .leftJoinAndSelect('space.spaceModel', 'spaceModel')
+        )
+        // .leftJoinAndSelect('space.spaceModel', 'spaceModel')
+        .andWhere('space.spaceName != :orphanSpaceName', {
+          orphanSpaceName: ORPHAN_SPACE_NAME,
+        })
+        .andWhere('space.disabled = :disabled', { disabled: false });
     }

     if (search) {
       qb.andWhere(
-        `c.name ILIKE :search ${includeSpaces ? 'OR space.space_name ILIKE :search' : ''}`,
-        { search },
+        `c.name ILIKE '%${search}%' ${includeSpaces ? "OR space.space_name ILIKE '%" + search + "%'" : ''}`,
       );
     }
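For comparison, the parameterized form of the same search filter binds the pattern as a query parameter instead of interpolating it into the SQL string, so the term cannot alter the shape of the generated query. A sketch using TypeORM's named-parameter binding, with the same query-builder aliases as the hunk above:

```typescript
import { ObjectLiteral, SelectQueryBuilder } from 'typeorm';

// Sketch: apply an ILIKE search with a bound parameter; TypeORM escapes it.
function applySearch<T extends ObjectLiteral>(
  qb: SelectQueryBuilder<T>,
  search: string,
  includeSpaces: boolean,
): SelectQueryBuilder<T> {
  return qb.andWhere(
    `c.name ILIKE :search${includeSpaces ? ' OR space.space_name ILIKE :search' : ''}`,
    { search: `%${search}%` },
  );
}
```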
@@ -217,21 +215,12 @@ export class CommunityService {

       const { baseResponseDto, paginationResponseDto } =
         await customModel.findAll({ ...pageable, modelName: 'community' }, qb);
-      if (includeSpaces) {
-        baseResponseDto.data = baseResponseDto.data.map((community) => ({
-          ...community,
-          spaces: this.spaceService.buildSpaceHierarchy(community.spaces || []),
-        }));
-      }
       return new PageResponse<CommunityDto>(
         baseResponseDto,
         paginationResponseDto,
       );
     } catch (error) {
       // Generic error handling
       if (error instanceof HttpException) {
         throw error;
       }
       throw new HttpException(
         error.message || 'An error occurred while fetching communities.',
         HttpStatus.INTERNAL_SERVER_ERROR,
@@ -1,11 +1,11 @@
-import { ControllerRoute } from '@app/common/constants/controller-route';
-import { EnableDisableStatusEnum } from '@app/common/constants/days.enum';
-import { Controller, Get, Param, Query, UseGuards } from '@nestjs/common';
-import { ApiBearerAuth, ApiOperation, ApiTags } from '@nestjs/swagger';
-import { Permissions } from 'src/decorators/permissions.decorator';
-import { PermissionsGuard } from 'src/guards/permissions.guard';
-import { GetDevicesFilterDto, ProjectParam } from '../dtos';
 import { DeviceService } from '../services/device.service';
+import { Controller, Get, Param, Query, UseGuards } from '@nestjs/common';
+import { ApiTags, ApiBearerAuth, ApiOperation } from '@nestjs/swagger';
+import { EnableDisableStatusEnum } from '@app/common/constants/days.enum';
+import { ControllerRoute } from '@app/common/constants/controller-route';
+import { PermissionsGuard } from 'src/guards/permissions.guard';
+import { Permissions } from 'src/decorators/permissions.decorator';
+import { GetDoorLockDevices, ProjectParam } from '../dtos';

 @ApiTags('Device Module')
 @Controller({
@@ -25,7 +25,7 @@ export class DeviceProjectController {
   })
   async getAllDevices(
     @Param() param: ProjectParam,
-    @Query() query: GetDevicesFilterDto,
+    @Query() query: GetDoorLockDevices,
   ) {
     return await this.deviceService.getAllDevices(param, query);
   }
@@ -1,7 +1,6 @@
 import { DeviceTypeEnum } from '@app/common/constants/device-type.enum';
 import { ApiProperty } from '@nestjs/swagger';
 import {
-  IsArray,
   IsEnum,
   IsNotEmpty,
   IsOptional,
@@ -42,7 +41,16 @@ export class GetDeviceLogsDto {
   @IsOptional()
   public endTime: string;
 }

+export class GetDoorLockDevices {
+  @ApiProperty({
+    description: 'Device Type',
+    enum: DeviceTypeEnum,
+    required: false,
+  })
+  @IsEnum(DeviceTypeEnum)
+  @IsOptional()
+  public deviceType: DeviceTypeEnum;
+}
 export class GetDevicesBySpaceOrCommunityDto {
   @ApiProperty({
     description: 'Device Product Type',
@@ -64,23 +72,3 @@ export class GetDevicesBySpaceOrCommunityDto {
   @IsNotEmpty({ message: 'Either spaceUuid or communityUuid must be provided' })
   requireEither?: never; // This ensures at least one of them is provided
 }
-
-export class GetDevicesFilterDto {
-  @ApiProperty({
-    description: 'Device Type',
-    enum: DeviceTypeEnum,
-    required: false,
-  })
-  @IsEnum(DeviceTypeEnum)
-  @IsOptional()
-  public deviceType: DeviceTypeEnum;
-  @ApiProperty({
-    description: 'List of Space IDs to filter devices',
-    required: false,
-    example: ['60d21b4667d0d8992e610c85', '60d21b4967d0d8992e610c86'],
-  })
-  @IsOptional()
-  @IsArray()
-  @IsUUID('4', { each: true })
-  public spaces?: string[];
-}
@@ -53,7 +53,7 @@ import { DeviceSceneParamDto } from '../dtos/device.param.dto';
 import {
   GetDeviceLogsDto,
   GetDevicesBySpaceOrCommunityDto,
-  GetDevicesFilterDto,
+  GetDoorLockDevices,
 } from '../dtos/get.device.dto';
 import {
   controlDeviceInterface,
@@ -955,20 +955,19 @@ export class DeviceService {

   async getAllDevices(
     param: ProjectParam,
-    { deviceType, spaces }: GetDevicesFilterDto,
+    query: GetDoorLockDevices,
   ): Promise<BaseResponseDto> {
     try {
       await this.validateProject(param.projectUuid);
-      if (deviceType === DeviceTypeEnum.DOOR_LOCK) {
-        return await this.getDoorLockDevices(param.projectUuid, spaces);
-      } else if (!deviceType) {
+      if (query.deviceType === DeviceTypeEnum.DOOR_LOCK) {
+        return await this.getDoorLockDevices(param.projectUuid);
+      } else if (!query.deviceType) {
         const devices = await this.deviceRepository.find({
           where: {
             isActive: true,
             spaceDevice: {
-              uuid: spaces && spaces.length ? In(spaces) : undefined,
-              spaceName: Not(ORPHAN_SPACE_NAME),
               community: { project: { uuid: param.projectUuid } },
+              spaceName: Not(ORPHAN_SPACE_NAME),
             },
           },
           relations: [
@@ -1564,7 +1563,7 @@ export class DeviceService {
     }
   }

-  async getDoorLockDevices(projectUuid: string, spaces?: string[]) {
+  async getDoorLockDevices(projectUuid: string) {
     await this.validateProject(projectUuid);

     const devices = await this.deviceRepository.find({
@@ -1574,7 +1573,6 @@ export class DeviceService {
       },
       spaceDevice: {
         spaceName: Not(ORPHAN_SPACE_NAME),
-        uuid: spaces && spaces.length ? In(spaces) : undefined,
         community: {
           project: {
             uuid: projectUuid,
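The `find` calls in these hunks filter devices through nested relation conditions (`spaceDevice.community.project.uuid`) and, in the variant that accepts a space list, narrow the result with `In(...)`. A condensed sketch of that where-clause shape (field names follow the hunk; the constant value and relation list are assumptions):

```typescript
import { In, Not, ObjectLiteral, Repository } from 'typeorm';

const ORPHAN_SPACE_NAME = 'orphan'; // assumed value, for illustration only

// Sketch: active devices for a project, excluding the orphan space and
// optionally restricted to an explicit list of space UUIDs.
async function findDevices(
  deviceRepository: Repository<ObjectLiteral>,
  projectUuid: string,
  spaces?: string[],
) {
  return deviceRepository.find({
    where: {
      isActive: true,
      spaceDevice: {
        // `undefined` makes TypeORM skip the condition entirely.
        uuid: spaces && spaces.length ? In(spaces) : undefined,
        spaceName: Not(ORPHAN_SPACE_NAME),
        community: { project: { uuid: projectUuid } },
      },
    },
    relations: ['spaceDevice', 'productDevice'], // illustrative relation names
  });
}
```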
@@ -26,6 +26,8 @@ async function bootstrap() {
     rateLimit({
       windowMs: 5 * 60 * 1000,
       max: 500,
+      standardHeaders: true,
+      legacyHeaders: false,
     }),
   );
@@ -34,7 +36,7 @@ async function bootstrap() {
     next();
   });

-  // app.getHttpAdapter().getInstance().set('trust proxy', 1);
+  app.getHttpAdapter().getInstance().set('trust proxy', 1);

   app.use(
     helmet({
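Enabling `trust proxy` matters for the rate limiter above: behind a reverse proxy, express-rate-limit needs the real client IP from `X-Forwarded-For` to key its counters correctly. A minimal bootstrap sketch combining the two settings (the module import path and port are assumptions):

```typescript
import { NestFactory } from '@nestjs/core';
import rateLimit from 'express-rate-limit';
import { AppModule } from './app.module'; // assumed module path

async function bootstrap() {
  const app = await NestFactory.create(AppModule);

  // Trust the first proxy hop so the limiter keys on the real client IP
  // taken from X-Forwarded-For instead of the proxy's address.
  app.getHttpAdapter().getInstance().set('trust proxy', 1);

  app.use(
    rateLimit({
      windowMs: 5 * 60 * 1000, // 5-minute window
      max: 500, // at most 500 requests per IP per window
      standardHeaders: true, // emit the standardized RateLimit-* headers
      legacyHeaders: false, // drop the legacy X-RateLimit-* headers
    }),
  );

  await app.listen(3000);
}
bootstrap();
```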
@@ -681,7 +681,7 @@ export class SpaceService {
     }
   }

-  buildSpaceHierarchy(spaces: SpaceEntity[]): SpaceEntity[] {
+  private buildSpaceHierarchy(spaces: SpaceEntity[]): SpaceEntity[] {
     const map = new Map<string, SpaceEntity>();

     // Step 1: Create a map of spaces by UUID
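For context, `buildSpaceHierarchy` turns a flat list of spaces into parent/child trees using a UUID map. A self-contained sketch of that pattern (the `SpaceNode` shape is an assumption, not the repo's `SpaceEntity`):

```typescript
// Assumed minimal shape; the real SpaceEntity carries many more fields.
interface SpaceNode {
  uuid: string;
  parentUuid?: string;
  children: SpaceNode[];
}

// Build trees from a flat list: index by UUID, then attach each node to its
// parent; nodes whose parent is absent from the list become roots.
function buildSpaceHierarchy(spaces: SpaceNode[]): SpaceNode[] {
  const map = new Map<string, SpaceNode>();
  for (const space of spaces) {
    map.set(space.uuid, { ...space, children: [] });
  }

  const roots: SpaceNode[] = [];
  for (const space of map.values()) {
    const parent = space.parentUuid ? map.get(space.parentUuid) : undefined;
    if (parent) {
      parent.children.push(space);
    } else {
      roots.push(space);
    }
  }
  return roots;
}
```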