Mirror of https://github.com/SyncrowIOT/backend.git (synced 2025-08-26 03:39:40 +00:00)

Compare commits: daily-aqi- ... e468818a26 (223 commits)
Commit list: 223 commits, from e468818a26 down to d8ad9e55ea. Only the SHA1 hashes were captured in this view; the author, date, and message columns are empty in the mirror.
@@ -21,6 +21,7 @@ module.exports = {
     '@typescript-eslint/explicit-function-return-type': 'off',
     '@typescript-eslint/explicit-module-boundary-types': 'off',
     '@typescript-eslint/no-explicit-any': 'off',
+    "@typescript-eslint/no-unused-vars": 'warn',
   },
   settings: {
     'import/resolver': {
17 .github/pull_request_template.md vendored Normal file
@@ -0,0 +1,17 @@
<!--
Thanks for contributing!

Provide a description of your changes below and a general summary in the title.
-->

## Jira Ticket

[SP-0000](https://syncrow.atlassian.net/browse/SP-0000)

## Description

<!--- Describe your changes in detail -->

## How to Test

<!--- Describe the created APIs / Logic -->
74 .github/workflows/main_syncrow(staging).yml vendored
@@ -1,4 +1,7 @@
-name: Backend deployment to Azure App Service
+# Docs for the Azure Web Apps Deploy action: https://github.com/Azure/webapps-deploy
+# More GitHub Actions for Azure: https://github.com/Azure/actions
+
+name: Build and deploy container app to Azure Web App - syncrow(staging)
 
 on:
   push:
@@ -6,50 +9,43 @@ on:
       - main
   workflow_dispatch:
 
-env:
-  AZURE_WEB_APP_NAME: 'syncrow'
-  AZURE_WEB_APP_SLOT_NAME: 'staging'
-  ACR_REGISTRY: 'syncrow.azurecr.io'
-  IMAGE_NAME: 'backend'
-  IMAGE_TAG: 'latest'
-
 jobs:
-  build_and_deploy:
-    runs-on: ubuntu-latest
+  build:
+    runs-on: 'ubuntu-latest'
 
     steps:
-      - uses: actions/checkout@v4
+      - uses: actions/checkout@v2
 
-      - name: Set up Node.js
-        uses: actions/setup-node@v3
-        with:
-          node-version: '20'
+      - name: Set up Docker Buildx
+        uses: docker/setup-buildx-action@v2
+
+      - name: Log in to registry
+        uses: docker/login-action@v2
+        with:
+          registry: https://syncrow.azurecr.io/
+          username: ${{ secrets.AzureAppService_ContainerUsername_47395803300340b49931ea82f6d80be3 }}
+          password: ${{ secrets.AzureAppService_ContainerPassword_e7b0ff54f54d44cba04a970a22384848 }}
 
-      - name: Install dependencies and build project
-        run: |
-          npm install
-          npm run build
-
-      - name: Log in to Azure
-        uses: azure/login@v1
-        with:
-          creds: ${{ secrets.AZURE_CREDENTIALS }}
-
-      - name: Log in to Azure Container Registry
-        run: az acr login --name ${{ env.ACR_REGISTRY }}
-
-      - name: List build output
-        run: ls -R dist/
-
-      - name: Build and push Docker image
-        run: |
-          docker build . -t ${{ env.ACR_REGISTRY }}/${{ env.IMAGE_NAME }}:${{ env.IMAGE_TAG }}
-          docker push ${{ env.ACR_REGISTRY }}/${{ env.IMAGE_NAME }}:${{ env.IMAGE_TAG }}
-
-      - name: Set Web App with Docker container
-        run: |
-          az webapp config container set \
-            --name ${{ env.AZURE_WEB_APP_NAME }} \
-            --resource-group backend \
-            --docker-custom-image-name ${{ env.ACR_REGISTRY }}/${{ env.IMAGE_NAME }}:${{ env.IMAGE_TAG }} \
-            --docker-registry-server-url https://${{ env.ACR_REGISTRY }}
+      - name: Build and push container image to registry
+        uses: docker/build-push-action@v3
+        with:
+          push: true
+          tags: syncrow.azurecr.io/${{ secrets.AzureAppService_ContainerUsername_47395803300340b49931ea82f6d80be3 }}/syncrow/backend:${{ github.sha }}
+          file: ./Dockerfile
+
+  deploy:
+    runs-on: ubuntu-latest
+    needs: build
+    environment:
+      name: 'staging'
+      url: ${{ steps.deploy-to-webapp.outputs.webapp-url }}
+
+    steps:
+      - name: Deploy to Azure Web App
+        id: deploy-to-webapp
+        uses: azure/webapps-deploy@v2
+        with:
+          app-name: 'syncrow'
+          slot-name: 'staging'
+          publish-profile: ${{ secrets.AzureAppService_PublishProfile_44f7766441ec4796b74789e9761ef589 }}
+          images: 'syncrow.azurecr.io/${{ secrets.AzureAppService_ContainerUsername_47395803300340b49931ea82f6d80be3 }}/syncrow/backend:${{ github.sha }}'
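The diff above drops the manual `az webapp config container set` step in favour of `azure/webapps-deploy`. A minimal sketch for checking what the staging slot actually ends up running after a deploy; it assumes the app name `syncrow` and resource group `backend` from the removed step still apply, so adjust to your subscription.

```bash
# Inspect the container image configured on the 'staging' slot.
az webapp config container show \
  --name syncrow \
  --resource-group backend \
  --slot staging

# Stream the slot's container logs while testing a deployment.
az webapp log tail --name syncrow --resource-group backend --slot staging
```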
73 .github/workflows/main_syncrow(stg).yml vendored Normal file
@@ -0,0 +1,73 @@
# Docs for the Azure Web Apps Deploy action: https://github.com/Azure/webapps-deploy
# More GitHub Actions for Azure: https://github.com/Azure/actions

name: Build and deploy Node.js app to Azure Web App - syncrow

on:
  push:
    branches:
      - main
  workflow_dispatch:

jobs:
  build:
    runs-on: ubuntu-latest
    permissions:
      contents: read #This is required for actions/checkout

    steps:
      - uses: actions/checkout@v4

      - name: Set up Node.js version
        uses: actions/setup-node@v3
        with:
          node-version: '20.x'

      - name: npm install, build, and test
        run: |
          npm install
          npm run build --if-present
          npm run test --if-present

      - name: Zip artifact for deployment
        run: zip release.zip ./* -r

      - name: Upload artifact for deployment job
        uses: actions/upload-artifact@v4
        with:
          name: node-app
          path: release.zip

  deploy:
    runs-on: ubuntu-latest
    needs: build
    environment:
      name: 'stg'
      url: ${{ steps.deploy-to-webapp.outputs.webapp-url }}
    permissions:
      id-token: write #This is required for requesting the JWT
      contents: read #This is required for actions/checkout

    steps:
      - name: Download artifact from build job
        uses: actions/download-artifact@v4
        with:
          name: node-app

      - name: Unzip artifact for deployment
        run: unzip release.zip

      - name: Login to Azure
        uses: azure/login@v2
        with:
          client-id: ${{ secrets.AZUREAPPSERVICE_CLIENTID_515C8E782CFF431AB20448C85CA0FE58 }}
          tenant-id: ${{ secrets.AZUREAPPSERVICE_TENANTID_2AEFE5534424490387C08FAE41573CC2 }}
          subscription-id: ${{ secrets.AZUREAPPSERVICE_SUBSCRIPTIONID_00623C33023749FEA5F6BC36884F9C8A }}

      - name: 'Deploy to Azure Web App'
        id: deploy-to-webapp
        uses: azure/webapps-deploy@v3
        with:
          app-name: 'syncrow'
          slot-name: 'stg'
          package: .
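A minimal local sketch of the build job above, useful for debugging packaging problems before pushing; it assumes Node 20.x and the same npm scripts the workflow calls.

```bash
# Reproduce the 'build' job locally and inspect the artifact the deploy job receives.
npm install
npm run build --if-present
npm run test --if-present
zip release.zip ./* -r
unzip -l release.zip | head
```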
64 .github/workflows/pr-description.yml vendored Normal file
@@ -0,0 +1,64 @@
name: 🤖 AI PR Description Commenter (100% Safe with jq)

on:
  pull_request:
    types: [opened, edited]

jobs:
  generate-description:
    runs-on: ubuntu-latest

    steps:
      - name: Checkout Repo
        uses: actions/checkout@v4

      - name: Install GitHub CLI and jq
        run: |
          sudo apt-get update
          sudo apt-get install gh jq -y

      - name: Fetch PR Commits
        id: fetch_commits
        run: |
          COMMITS=$(gh pr view ${{ github.event.pull_request.number }} --json commits --jq '.commits[].message' | sed 's/^/- /')
          echo "commits<<EOF" >> $GITHUB_ENV
          echo "$COMMITS" >> $GITHUB_ENV
          echo "EOF" >> $GITHUB_ENV
        env:
          GH_TOKEN: ${{ secrets.GH_PERSONAL_TOKEN }}

      - name: Generate PR Description with OpenAI (Safe JSON with jq)
        run: |
          REQUEST_BODY=$(jq -n \
            --arg model "gpt-4o" \
            --arg content "Given the following commit messages:\n\n${commits}\n\nGenerate a clear and professional pull request description." \
            '{
              model: $model,
              messages: [{ role: "user", content: $content }]
            }'
          )

          RESPONSE=$(curl -s https://api.openai.com/v1/chat/completions \
            -H "Authorization: Bearer $OPENAI_API_KEY" \
            -H "Content-Type: application/json" \
            -d "$REQUEST_BODY")

          DESCRIPTION=$(echo "$RESPONSE" | jq -r '.choices[0].message.content')

          echo "---------- OpenAI Raw Response ----------"
          echo "$RESPONSE"
          echo "---------- Extracted Description ----------"
          echo "$DESCRIPTION"

          echo "description<<EOF" >> $GITHUB_ENV
          echo "$DESCRIPTION" >> $GITHUB_ENV
          echo "EOF" >> $GITHUB_ENV
        env:
          OPENAI_API_KEY: ${{ secrets.OPENAI_API_KEY }}
          commits: ${{ env.commits }}

      - name: Post AI Generated Description as Comment
        run: |
          gh pr comment ${{ github.event.pull_request.number }} --body "${{ env.description }}"
        env:
          GH_TOKEN: ${{ secrets.GH_PERSONAL_TOKEN }}
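The "Safe with jq" in the workflow name refers to building the request body with `jq -n --arg` instead of interpolating commit messages into a JSON string by hand. A minimal local sketch of why that matters; the sample commit text is made up, and only `jq` is required to run it.

```bash
# Commit messages can contain quotes, backslashes, and $(...) text.
commits='- fix "health" endpoint
- add \ backslash and $(dangerous) text'

# jq escapes all of it into valid JSON, so the payload never breaks.
REQUEST_BODY=$(jq -n \
  --arg model "gpt-4o" \
  --arg content "Given the following commit messages:\n\n${commits}\n\nGenerate a clear and professional pull request description." \
  '{ model: $model, messages: [{ role: "user", content: $content }] }')

echo "$REQUEST_BODY" | jq .   # still parses as JSON despite the awkward characters
```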
40 .github/workflows/production.yml vendored Normal file
@@ -0,0 +1,40 @@
name: 🚀 Production Deployment

on:
  push:
    branches:
      - master

jobs:
  deploy:
    runs-on: ubuntu-latest

    steps:
      - name: ⬇️ Checkout Code
        uses: actions/checkout@v4

      - name: 🐢 Set up Node.js 20.x
        uses: actions/setup-node@v4
        with:
          node-version: '20'

      - name: 🐳 Set up Docker Buildx
        uses: docker/setup-buildx-action@v3

      - name: 🔐 Configure AWS Credentials
        uses: aws-actions/configure-aws-credentials@v4
        with:
          aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY_ID }}
          aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
          aws-region: me-central-1

      - name: 🗂️ Write .env file from ENV_FILE Secret
        run: echo "${{ secrets.ENV_FILE }}" > .env

      - name: 📦 Install Dependencies
        run: npm install

      - name: 🛠️ Run Production Build & Deploy Script
        run: |
          chmod +x ./build.sh
          ./build.sh
9 .gitignore vendored
@@ -4,7 +4,7 @@
 /build
 
 #github
-/.github
+/.github/workflows
 
 # Logs
 logs
@@ -58,4 +58,9 @@ pids
 # Diagnostic reports (https://nodejs.org/api/report.html)
 report.[0-9]*.[0-9]*.[0-9]*.[0-9]*.json
 
 config.dev
+cdk.out
+backend-cdk-new.out
+web-cdk.out
+backend-cdk.out
+backend-cdk-final.out
22 Dockerfile
@@ -1,16 +1,28 @@
-FROM node:20-alpine
+FROM --platform=linux/amd64 node:20-alpine
+
+# curl for health checks
+RUN apk add --no-cache curl
 
 WORKDIR /app
 
 COPY package*.json ./
 
-RUN npm install
-RUN npm install -g @nestjs/cli
+RUN npm install --production --ignore-scripts
 
 COPY . .
 
 RUN npm run build
 
-EXPOSE 4000
+RUN addgroup -g 1001 -S nodejs
+RUN adduser -S nestjs -u 1001
 
-CMD ["npm", "run", "start"]
+RUN chown -R nestjs:nodejs /app
+USER nestjs
+
+EXPOSE 3000
+
+HEALTHCHECK --interval=30s --timeout=3s --start-period=5s --retries=3 \
+  CMD curl -f http://localhost:3000/health || exit 1
+
+CMD ["npm", "run", "start:prod"]
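A minimal sketch for exercising the new HEALTHCHECK locally before pushing the image; it assumes the app serves GET /health on port 3000 as the Dockerfile implies, and that a local `.env` file provides the runtime configuration.

```bash
# Build the image and run it with local configuration.
docker build --platform=linux/amd64 -t syncrow-backend:local .
docker run -d --name syncrow-backend-test -p 3000:3000 --env-file .env syncrow-backend:local

# The HEALTHCHECK runs inside the container; read its status from Docker.
docker inspect --format '{{json .State.Health.Status}}' syncrow-backend-test

# Or hit the endpoint directly from the host.
curl -f http://localhost:3000/health

docker rm -f syncrow-backend-test
```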
129 GITHUB_SETUP.md Normal file
@@ -0,0 +1,129 @@
# GitHub Actions Setup Guide

## Required GitHub Secrets

Add these secrets to your GitHub repository (Settings > Secrets and variables > Actions):

### AWS Credentials

```
AWS_ACCESS_KEY_ID=your-aws-access-key
AWS_SECRET_ACCESS_KEY=your-aws-secret-key
```

### JWT Configuration (CRITICAL - Generate secure random strings)

```
JWT_SECRET=your-super-secure-jwt-secret-key-here
JWT_SECRET_REFRESH=your-super-secure-refresh-secret-key-here
SECRET_KEY=your-general-encryption-secret-key-here
```

### Admin Configuration

```
SUPER_ADMIN_EMAIL=admin@syncrow.ae
SUPER_ADMIN_PASSWORD=YourSecureAdminPassword123!
```

### Tuya IoT Configuration

```
TUYA_ACCESS_ID=your-tuya-access-id
TUYA_ACCESS_KEY=your-tuya-access-key
TRUN_ON_TUYA_SOCKET=true-or-false
```

### Firebase Configuration

```
FIREBASE_API_KEY=your-firebase-api-key
FIREBASE_AUTH_DOMAIN=your-project.firebaseapp.com
FIREBASE_PROJECT_ID=your-project-id
FIREBASE_STORAGE_BUCKET=your-project.appspot.com
FIREBASE_MESSAGING_SENDER_ID=your-sender-id
FIREBASE_APP_ID=your-app-id
FIREBASE_MEASUREMENT_ID=your-measurement-id
FIREBASE_DATABASE_URL=https://your-project.firebaseio.com
```

### Google OAuth

```
GOOGLE_CLIENT_ID=your-google-client-id
GOOGLE_CLIENT_SECRET=your-google-client-secret
```

### OneSignal Push Notifications

```
ONESIGNAL_APP_ID=your-onesignal-app-id
ONESIGNAL_API_KEY=your-onesignal-api-key
```

### Email Configuration (SMTP)

```
SMTP_HOST=your-smtp-host
SMTP_USER=your-smtp-username
SMTP_PASSWORD=your-smtp-password
```

### Mailtrap Configuration

```
MAILTRAP_API_TOKEN=your-mailtrap-api-token
MAILTRAP_ENABLE_TEMPLATE_UUID=template-uuid
MAILTRAP_DISABLE_TEMPLATE_UUID=template-uuid
MAILTRAP_INVITATION_TEMPLATE_UUID=template-uuid
MAILTRAP_DELETE_USER_TEMPLATE_UUID=template-uuid
MAILTRAP_EDIT_USER_TEMPLATE_UUID=template-uuid
```

### Optional Services (leave empty if not used)

```
AZURE_REDIS_CONNECTIONSTRING=your-redis-connection-string
DOPPLER_PROJECT=your-doppler-project
DOPPLER_CONFIG=your-doppler-config
DOPPLER_ENVIRONMENT=your-doppler-environment
ACCESS_KEY=your-access-key
DOCKER_REGISTRY_SERVER_URL=your-registry-url
DOCKER_REGISTRY_SERVER_USERNAME=your-registry-username
DOCKER_REGISTRY_SERVER_PASSWORD=your-registry-password
```

## Setup Steps

1. **Add AWS Credentials**
   - Create IAM user with ECR, ECS, CloudFormation permissions
   - Add AWS_ACCESS_KEY_ID and AWS_SECRET_ACCESS_KEY to GitHub Secrets

2. **Generate JWT Secrets**
   - Use a secure random string generator
   - Make JWT_SECRET and JWT_SECRET_REFRESH different values
   - Keep these values secure and never share them

3. **Configure Services**
   - Add secrets for each service you're using
   - Leave unused services empty (they'll default to empty strings)

4. **Test Deployment**
   - Push to master/main branch
   - Check GitHub Actions tab for deployment status
   - Verify API is accessible at https://api.syncos.syncrow.ae

## Security Notes

- Never commit secrets to the repository
- Use GitHub Secrets for all sensitive values
- Rotate secrets regularly
- Monitor GitHub Actions logs for any exposed values
- Database password is automatically managed by AWS Secrets Manager

## Troubleshooting

- Check GitHub Actions logs for deployment errors
- Verify all required secrets are set
- Ensure AWS credentials have sufficient permissions
- Check ECS service logs in CloudWatch for runtime errors
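One way to carry out the "Generate JWT Secrets" step from the guide above and push the values straight into the repository with the GitHub CLI; `openssl rand` and `gh secret set` are standard tools, and the repo slug comes from the mirror URL.

```bash
# Generate three independent random secrets.
JWT_SECRET=$(openssl rand -base64 48)
JWT_SECRET_REFRESH=$(openssl rand -base64 48)   # keep this different from JWT_SECRET
SECRET_KEY=$(openssl rand -base64 48)

# Store them as GitHub Actions secrets without ever printing them.
gh secret set JWT_SECRET         --repo SyncrowIOT/backend --body "$JWT_SECRET"
gh secret set JWT_SECRET_REFRESH --repo SyncrowIOT/backend --body "$JWT_SECRET_REFRESH"
gh secret set SECRET_KEY         --repo SyncrowIOT/backend --body "$SECRET_KEY"
```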
39 README.md
@@ -1,17 +1,19 @@
 # Backend
 
 ## Overview
 
 This is the backend for an IoT application built using NestJS. It interfaces with the Tuya IoT cloud platform to manage homes, rooms, devices, ...etc.
 This is the backend APIs project, developed with NestJS for Syncrow IOT Project.
 
 ## Database Model
 
 The database uses PostgreSQL and TypeORM. Below is an entity relationship diagram:
 
 The main entities are:
 
 User - Stores user account information
 Home - Represents a home/space
 Room - Represents a room/sub-space
 Device - Represents a connected device
 Product - Stores metadata about device products
 Other Entities - sessions, OTPs, etc.
@@ -19,10 +21,11 @@ Other Entities - sessions, OTPs, etc.
 The entities have a one-to-many relationship - a user has multiple homes, a home has multiple rooms, and a room has multiple devices.
 
 ## Architecture
 
 The application is deployed on Azure App Service using Docker containers. There are separate deployment slots for development, staging, and production environments.
 
 ## Installation
 
 First, ensure that you have Node.js `v20.11` or newer (LTS ONLY) installed on your system.
 
 To install the project dependencies, run the following command in the project root directory:
@@ -61,8 +64,8 @@ $ npm run test:cov
 
 ![Syncrow ERD Digram](erd-digram.png)
 
 ## Architecture
 
 +----------------------------------+
 | |
 | Applications |
@@ -107,3 +110,29 @@ $ npm run test:cov
 | | Standby Node | | |
 | +------------------+----------------+ |
 +-----------------------------------------------------------------+
+
+## CDK Deployment
+
+• Bootstrap CDK (first time only): npx cdk bootstrap aws://482311766496/me-central-1
+• List available stacks: npx cdk list
+• Deploy infrastructure: npx cdk deploy --require-approval never
+• View changes before deploy: npx cdk diff
+• Generate CloudFormation template: npx cdk synth
+• Destroy infrastructure: npx cdk destroy
+• Environment variables are configured in infrastructure/stack.ts
+• After code changes: build Docker image, push to ECR, force ECS deployment
+• Database seeding happens automatically on first deployment with DB_SYNC=true
+• Admin credentials: admin@syncrow.ae / YourSecureAdminPassword123!
+• Production API: https://api.syncos.syncrow.ae
+• Health check: https://api.syncos.syncrow.ae/health
+
+## GitHub Actions Deployment
+
+• Automatic deployment on push to master/main branch
+• Configure GitHub Secrets (see GITHUB_SETUP.md for complete list)
+• Required secrets: AWS_ACCESS_KEY_ID, AWS_SECRET_ACCESS_KEY, JWT_SECRET, JWT_SECRET_REFRESH
+• Workflow builds Docker image, pushes to ECR, and deploys CDK stack
+• Environment variables are passed securely via GitHub Secrets
+• Manual deployment: Go to Actions tab and run "Deploy Backend to AWS" workflow
+• Check deployment status in GitHub Actions tab
+• Logs available in CloudWatch under /ecs/syncrow-backend log group
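A minimal sketch for checking a deployment after a push, using the names the new README bullets introduce (cluster syncrow-backend-cluster, log group /ecs/syncrow-backend, the public health endpoint). It assumes AWS CLI v2 credentials for the target account.

```bash
# Show the current deployments of the single ECS service in the cluster.
aws ecs describe-services \
  --cluster syncrow-backend-cluster \
  --services "$(aws ecs list-services --cluster syncrow-backend-cluster \
      --query 'serviceArns[0]' --output text)" \
  --query 'services[0].deployments'

# Tail the application logs referenced in the GitHub Actions bullets.
aws logs tail /ecs/syncrow-backend --follow --since 15m

# Confirm the public endpoint is healthy.
curl -f https://api.syncos.syncrow.ae/health
```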
46 build.sh Normal file
@@ -0,0 +1,46 @@
#!/bin/bash
set -e

ACCOUNT_ID=$(aws sts get-caller-identity --query Account --output text)
REGION=${AWS_DEFAULT_REGION:-me-central-1}
REPO_NAME=syncrow-backend
IMAGE_TAG=latest
CLUSTER_NAME=syncrow-backend-cluster
STACK_NAME=SyncrowBackendStack
CERTIFICATE_ARN="arn:aws:acm:$REGION:$ACCOUNT_ID:certificate/bea1e2ae-84a1-414e-8dbf-4599397e7ed0"

echo "🔐 Logging into ECR..."
aws ecr get-login-password --region $REGION | docker login --username AWS --password-stdin "$ACCOUNT_ID.dkr.ecr.$REGION.amazonaws.com"

echo "🐳 Building Docker image..."
docker build --platform=linux/amd64 -t $REPO_NAME .

echo "🏷️ Tagging image..."
docker tag $REPO_NAME:$IMAGE_TAG "$ACCOUNT_ID.dkr.ecr.$REGION.amazonaws.com/$REPO_NAME:$IMAGE_TAG"

echo "📤 Pushing image to ECR..."
docker push "$ACCOUNT_ID.dkr.ecr.$REGION.amazonaws.com/$REPO_NAME:$IMAGE_TAG"

echo "🔍 Checking if ECS service exists..."
SERVICE_ARN=$(aws ecs list-services \
  --cluster $CLUSTER_NAME \
  --query 'serviceArns[0]' \
  --output text \
  --region $REGION 2>/dev/null || echo "")

echo "📦 Deploying CDK Stack..."
npx cdk deploy $STACK_NAME \
  --context certificateArn=$CERTIFICATE_ARN \
  --require-approval never

if [[ "$SERVICE_ARN" != "" && "$SERVICE_ARN" != "None" ]]; then
  SERVICE_NAME=$(basename "$SERVICE_ARN")
  echo "🚀 Redeploying ECS Service: $SERVICE_NAME"
  aws ecs update-service \
    --cluster $CLUSTER_NAME \
    --service $SERVICE_NAME \
    --force-new-deployment \
    --region $REGION
fi

echo "✅ All done."
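Typical invocation of the script above, as a sketch: it derives the account from the current AWS credentials and defaults the region to me-central-1, so the only thing usually worth overriding is `AWS_DEFAULT_REGION`. Docker must be running and the credentials need ECR, ECS, and CloudFormation permissions.

```bash
# Confirm which account/role the script will use, then run it.
aws sts get-caller-identity
chmod +x ./build.sh
AWS_DEFAULT_REGION=me-central-1 ./build.sh
```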
29 cdk.context.json Normal file
@@ -0,0 +1,29 @@
{
  "availability-zones:account=426265406140:region=us-east-2": [
    "us-east-2a",
    "us-east-2b",
    "us-east-2c"
  ],
  "availability-zones:account=482311766496:region=us-east-2": [
    "us-east-2a",
    "us-east-2b",
    "us-east-2c"
  ],
  "hosted-zone:account=482311766496:domainName=syncrow.me:region=us-east-2": {
    "Id": "/hostedzone/Z02085662NLJECF4DGJV3",
    "Name": "syncrow.me."
  },
  "availability-zones:account=482311766496:region=me-central-1": [
    "me-central-1a",
    "me-central-1b",
    "me-central-1c"
  ],
  "hosted-zone:account=482311766496:domainName=syncrow.me:region=me-central-1": {
    "Id": "/hostedzone/Z02085662NLJECF4DGJV3",
    "Name": "syncrow.me."
  },
  "hosted-zone:account=482311766496:domainName=syncrow.ae:region=me-central-1": {
    "Id": "/hostedzone/Z01153152LRHQTA1370P4",
    "Name": "syncrow.ae."
  }
}
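The file above is CDK's lookup cache for availability zones and hosted zones. If an account or zone changes, the cached entries are normally refreshed through the CLI rather than edited by hand; a minimal sketch, using one of the keys committed above:

```bash
# List the cached context keys.
npx cdk context

# Drop a stale entry, then re-run synth so CDK performs the lookup again
# and rewrites cdk.context.json.
npx cdk context --reset "hosted-zone:account=482311766496:domainName=syncrow.ae:region=me-central-1"
npx cdk synth
```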
58 cdk.json Normal file
@@ -0,0 +1,58 @@
{
  "app": "npx ts-node --prefer-ts-exts infrastructure/app.ts",
  "watch": {
    "include": [
      "**"
    ],
    "exclude": [
      "README.md",
      "cdk*.json",
      "**/*.d.ts",
      "**/*.js",
      "tsconfig.json",
      "package*.json",
      "yarn.lock",
      "node_modules",
      "test"
    ]
  },
  "context": {
    "@aws-cdk/aws-lambda:recognizeLayerVersion": true,
    "@aws-cdk/core:checkSecretUsage": true,
    "@aws-cdk/core:target-partitions": [
      "aws",
      "aws-cn"
    ],
    "@aws-cdk-containers/ecs-service-extensions:enableDefaultLogDriver": true,
    "@aws-cdk/aws-ec2:uniqueImdsv2TemplateName": true,
    "@aws-cdk/aws-ecs:arnFormatIncludesClusterName": true,
    "@aws-cdk/aws-iam:minimizePolicies": true,
    "@aws-cdk/core:validateSnapshotRemovalPolicy": true,
    "@aws-cdk/aws-codepipeline:crossAccountKeyAliasStackSafeResourceName": true,
    "@aws-cdk/aws-s3:createDefaultLoggingPolicy": true,
    "@aws-cdk/aws-sns-subscriptions:restrictSqsDescryption": true,
    "@aws-cdk/aws-apigateway:disableCloudWatchRole": true,
    "@aws-cdk/core:enablePartitionLiterals": true,
    "@aws-cdk/aws-events:eventsTargetQueueSameAccount": true,
    "@aws-cdk/aws-iam:standardizedServicePrincipals": true,
    "@aws-cdk/aws-ecs:disableExplicitDeploymentControllerForCircuitBreaker": true,
    "@aws-cdk/aws-iam:importedRoleStackSafeDefaultPolicyName": true,
    "@aws-cdk/aws-s3:serverAccessLogsUseBucketPolicy": true,
    "@aws-cdk/aws-route53-patters:useCertificate": true,
    "@aws-cdk/customresources:installLatestAwsSdkDefault": false,
    "@aws-cdk/aws-rds:databaseProxyUniqueResourceName": true,
    "@aws-cdk/aws-codedeploy:removeAlarmsFromDeploymentGroup": true,
    "@aws-cdk/aws-apigateway:authorizerChangeDeploymentLogicalId": true,
    "@aws-cdk/aws-ec2:launchTemplateDefaultUserData": true,
    "@aws-cdk/aws-secretsmanager:useAttachedSecretResourcePolicyForSecretTargetAttachments": true,
    "@aws-cdk/aws-redshift:columnId": true,
    "@aws-cdk/aws-stepfunctions-tasks:enableLogging": true,
    "@aws-cdk/aws-ec2:restrictDefaultSecurityGroup": true,
    "@aws-cdk/aws-apigateway:requestValidatorUniqueId": true,
    "@aws-cdk/aws-kms:aliasNameRef": true,
    "@aws-cdk/aws-autoscaling:generateLaunchTemplateInsteadOfLaunchConfig": true,
    "@aws-cdk/aws-ecs:removeDefaultDeploymentAlarm": true,
    "@aws-cdk/aws-rds:preventRenderingDeprecatedCredentials": true,
    "@aws-cdk/aws-codepipeline-actions:useNewDefaultBranchForSourceAction": true
  }
}
22 deploy.sh Executable file
@@ -0,0 +1,22 @@
#!/bin/bash
set -e

ACCOUNT_ID=$(aws sts get-caller-identity --query Account --output text)
REGION=${AWS_DEFAULT_REGION:-me-central-1}

npx cdk deploy SyncrowBackendStack --context certificateArn=arn:aws:acm:me-central-1:482311766496:certificate/bea1e2ae-84a1-414e-8dbf-4599397e7ed0 --require-approval never

aws ecr get-login-password --region $REGION | docker login --username AWS --password-stdin $ACCOUNT_ID.dkr.ecr.$REGION.amazonaws.com

docker build --platform=linux/amd64 -t syncrow-backend .
docker tag syncrow-backend:latest $ACCOUNT_ID.dkr.ecr.$REGION.amazonaws.com/syncrow-backend:latest
docker push $ACCOUNT_ID.dkr.ecr.$REGION.amazonaws.com/syncrow-backend:latest

SERVICE_ARN=$(aws ecs list-services --cluster syncrow-backend-cluster --query 'serviceArns[0]' --output text --region $REGION 2>/dev/null || echo "")

if [ "$SERVICE_ARN" != "" ] && [ "$SERVICE_ARN" != "None" ]; then
  SERVICE_NAME=$(echo $SERVICE_ARN | cut -d'/' -f3)
  aws ecs update-service --cluster syncrow-backend-cluster --service $SERVICE_NAME --force-new-deployment --region $REGION
else
  npx cdk deploy SyncrowBackendStack --context certificateArn=arn:aws:acm:me-central-1:482311766496:certificate/bea1e2ae-84a1-414e-8dbf-4599397e7ed0 --require-approval never
fi
16 infrastructure/app.ts Normal file
@@ -0,0 +1,16 @@
#!/usr/bin/env node
import * as cdk from 'aws-cdk-lib';
import 'source-map-support/register';
import { BackendStack } from './stack';

const app = new cdk.App();

new BackendStack(app, 'SyncrowBackendStack', {
  env: {
    account: process.env.CDK_DEFAULT_ACCOUNT,
    region: 'me-central-1',
  },
  databaseName: 'postgres',
  certificateArn:
    'arn:aws:acm:me-central-1:482311766496:certificate/423b343e-402b-4978-89bd-cda25f7a8873',
});
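With `cdk.json` pointing at the entry file above, the usual CDK commands operate on the stack it defines; a minimal sketch, matching the `--require-approval never` convention used in the project's scripts:

```bash
npx cdk list                                            # should print SyncrowBackendStack
npx cdk synth SyncrowBackendStack                       # emit the CloudFormation template
npx cdk diff SyncrowBackendStack                        # preview changes against what is deployed
npx cdk deploy SyncrowBackendStack --require-approval never
```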
393 infrastructure/stack.ts Normal file
@@ -0,0 +1,393 @@
|
import * as cdk from 'aws-cdk-lib';
|
||||||
|
import * as acm from 'aws-cdk-lib/aws-certificatemanager';
|
||||||
|
import * as ec2 from 'aws-cdk-lib/aws-ec2';
|
||||||
|
import * as ecr from 'aws-cdk-lib/aws-ecr';
|
||||||
|
import * as ecs from 'aws-cdk-lib/aws-ecs';
|
||||||
|
import * as ecsPatterns from 'aws-cdk-lib/aws-ecs-patterns';
|
||||||
|
import * as elbv2 from 'aws-cdk-lib/aws-elasticloadbalancingv2';
|
||||||
|
import * as logs from 'aws-cdk-lib/aws-logs';
|
||||||
|
import * as rds from 'aws-cdk-lib/aws-rds';
|
||||||
|
import * as route53 from 'aws-cdk-lib/aws-route53';
|
||||||
|
import { Construct } from 'constructs';
|
||||||
|
import * as dotenv from 'dotenv';
|
||||||
|
|
||||||
|
export interface BackendStackProps extends cdk.StackProps {
|
||||||
|
vpcId?: string;
|
||||||
|
databaseName?: string;
|
||||||
|
certificateArn?: string;
|
||||||
|
}
|
||||||
|
|
||||||
|
export class BackendStack extends cdk.Stack {
|
||||||
|
public readonly apiUrl: string;
|
||||||
|
public readonly databaseEndpoint: string;
|
||||||
|
public readonly vpc: ec2.IVpc;
|
||||||
|
|
||||||
|
constructor(scope: Construct, id: string, props?: BackendStackProps) {
|
||||||
|
super(scope, id, props);
|
||||||
|
|
||||||
|
// Load environment variables from .env file
|
||||||
|
dotenv.config({ path: '.env' });
|
||||||
|
|
||||||
|
// VPC - either use existing or create new
|
||||||
|
this.vpc = props?.vpcId
|
||||||
|
? ec2.Vpc.fromLookup(this, 'ExistingVpc', { vpcId: props.vpcId })
|
||||||
|
: new ec2.Vpc(this, 'SyncrowVpc', {
|
||||||
|
maxAzs: 2,
|
||||||
|
natGateways: 1,
|
||||||
|
subnetConfiguration: [
|
||||||
|
{
|
||||||
|
cidrMask: 24,
|
||||||
|
name: 'public',
|
||||||
|
subnetType: ec2.SubnetType.PUBLIC,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
cidrMask: 24,
|
||||||
|
name: 'private',
|
||||||
|
subnetType: ec2.SubnetType.PRIVATE_WITH_EGRESS,
|
||||||
|
},
|
||||||
|
],
|
||||||
|
});
|
||||||
|
|
||||||
|
// Security Groups
|
||||||
|
const dbSecurityGroup = new ec2.SecurityGroup(
|
||||||
|
this,
|
||||||
|
'DatabaseSecurityGroup',
|
||||||
|
{
|
||||||
|
vpc: this.vpc,
|
||||||
|
description: 'Security group for RDS PostgreSQL',
|
||||||
|
allowAllOutbound: false,
|
||||||
|
},
|
||||||
|
);
|
||||||
|
|
||||||
|
const ecsSecurityGroup = new ec2.SecurityGroup(this, 'EcsSecurityGroup', {
|
||||||
|
vpc: this.vpc,
|
||||||
|
description: 'Security group for ECS Fargate service',
|
||||||
|
allowAllOutbound: true,
|
||||||
|
});
|
||||||
|
|
||||||
|
const albSecurityGroup = new ec2.SecurityGroup(this, 'AlbSecurityGroup', {
|
||||||
|
vpc: this.vpc,
|
||||||
|
description: 'Security group for Application Load Balancer',
|
||||||
|
allowAllOutbound: true,
|
||||||
|
});
|
||||||
|
|
||||||
|
// Allow ALB to connect to ECS
|
||||||
|
ecsSecurityGroup.addIngressRule(
|
||||||
|
albSecurityGroup,
|
||||||
|
ec2.Port.tcp(3000),
|
||||||
|
'Allow ALB to connect to ECS service',
|
||||||
|
);
|
||||||
|
|
||||||
|
// Allow ECS to connect to RDS
|
||||||
|
dbSecurityGroup.addIngressRule(
|
||||||
|
ecsSecurityGroup,
|
||||||
|
ec2.Port.tcp(5432),
|
||||||
|
'Allow ECS to connect to PostgreSQL',
|
||||||
|
);
|
||||||
|
|
||||||
|
// Temporary access for admin IP
|
||||||
|
dbSecurityGroup.addIngressRule(
|
||||||
|
ec2.Peer.ipv4('216.126.231.231/32'),
|
||||||
|
ec2.Port.tcp(5432),
|
||||||
|
'Temporary access from admin IP',
|
||||||
|
);
|
||||||
|
|
||||||
|
// Allow HTTP/HTTPS traffic to ALB
|
||||||
|
albSecurityGroup.addIngressRule(
|
||||||
|
ec2.Peer.anyIpv4(),
|
||||||
|
ec2.Port.tcp(80),
|
||||||
|
'Allow HTTP traffic',
|
||||||
|
);
|
||||||
|
albSecurityGroup.addIngressRule(
|
||||||
|
ec2.Peer.anyIpv4(),
|
||||||
|
ec2.Port.tcp(443),
|
||||||
|
'Allow HTTPS traffic',
|
||||||
|
);
|
||||||
|
|
||||||
|
const dbCluster = rds.DatabaseCluster.fromDatabaseClusterAttributes(
|
||||||
|
this,
|
||||||
|
'SyncrowDatabase',
|
||||||
|
{
|
||||||
|
clusterIdentifier: 'syncrow-backend',
|
||||||
|
instanceIdentifiers: ['syncrowdatabase-instance-1'],
|
||||||
|
engine: rds.DatabaseClusterEngine.auroraPostgres({
|
||||||
|
version: rds.AuroraPostgresEngineVersion.VER_16_6,
|
||||||
|
}),
|
||||||
|
port: 5432,
|
||||||
|
securityGroups: [
|
||||||
|
ec2.SecurityGroup.fromSecurityGroupId(
|
||||||
|
this,
|
||||||
|
'ImportedDbSecurityGroup',
|
||||||
|
'sg-07e163f588b2bac25',
|
||||||
|
),
|
||||||
|
],
|
||||||
|
clusterEndpointAddress:
|
||||||
|
'syncrow-backend.cluster-criskv1sdkq4.me-central-1.rds.amazonaws.com',
|
||||||
|
},
|
||||||
|
);
|
||||||
|
|
||||||
|
// Import the existing database secret separately
|
||||||
|
const dbSecret = rds.DatabaseSecret.fromSecretCompleteArn(
|
||||||
|
this,
|
||||||
|
'ImportedDbSecret',
|
||||||
|
'arn:aws:secretsmanager:me-central-1:482311766496:secret:rds!cluster-43ec14cd-9301-43e2-aa79-d330a429a126-v0JDQN',
|
||||||
|
);
|
||||||
|
|
||||||
|
// ECR Repository for Docker images - import existing repository
|
||||||
|
const ecrRepository = ecr.Repository.fromRepositoryName(
|
||||||
|
this,
|
||||||
|
'SyncrowBackendRepo',
|
||||||
|
'syncrow-backend',
|
||||||
|
);
|
||||||
|
|
||||||
|
// Output the correct ECR URI for this region
|
||||||
|
new cdk.CfnOutput(this, 'EcrRepositoryUriRegional', {
|
||||||
|
value: ecrRepository.repositoryUri,
|
||||||
|
description: `ECR Repository URI in region ${this.region}`,
|
||||||
|
exportName: `${this.stackName}-EcrRepositoryUriRegional`,
|
||||||
|
});
|
||||||
|
|
||||||
|
// ECS Cluster
|
||||||
|
const cluster = new ecs.Cluster(this, 'SyncrowCluster', {
|
||||||
|
vpc: this.vpc,
|
||||||
|
clusterName: 'syncrow-backend-cluster',
|
||||||
|
});
|
||||||
|
|
||||||
|
// CloudWatch Log Group
|
||||||
|
const logGroup = new logs.LogGroup(this, 'SyncrowBackendLogs', {
|
||||||
|
logGroupName: '/ecs/syncrow-backend',
|
||||||
|
retention: logs.RetentionDays.ONE_WEEK,
|
||||||
|
removalPolicy: cdk.RemovalPolicy.DESTROY,
|
||||||
|
});
|
||||||
|
|
||||||
|
// Use existing wildcard certificate or create new one
|
||||||
|
const apiCertificate = props?.certificateArn
|
||||||
|
? acm.Certificate.fromCertificateArn(
|
||||||
|
this,
|
||||||
|
'ApiCertificate',
|
||||||
|
props.certificateArn,
|
||||||
|
)
|
||||||
|
: new acm.Certificate(this, 'ApiCertificate', {
|
||||||
|
domainName: 'api.syncos.syncrow.ae',
|
||||||
|
validation: acm.CertificateValidation.fromDns(),
|
||||||
|
});
|
||||||
|
|
||||||
|
// ECS Fargate Service with Application Load Balancer
|
||||||
|
const fargateService =
|
||||||
|
new ecsPatterns.ApplicationLoadBalancedFargateService(
|
||||||
|
this,
|
||||||
|
'SyncrowBackendService',
|
||||||
|
{
|
||||||
|
cluster,
|
||||||
|
memoryLimitMiB: 1024,
|
||||||
|
cpu: 512,
|
||||||
|
desiredCount: 2,
|
||||||
|
domainName: 'api.syncos.syncrow.ae',
|
||||||
|
domainZone: route53.HostedZone.fromLookup(this, 'SyncrowZone', {
|
||||||
|
domainName: 'syncrow.ae',
|
||||||
|
}),
|
||||||
|
certificate: apiCertificate,
|
||||||
|
protocol: elbv2.ApplicationProtocol.HTTPS,
|
||||||
|
redirectHTTP: true,
|
||||||
|
taskImageOptions: {
|
||||||
|
image: ecs.ContainerImage.fromEcrRepository(
|
||||||
|
ecrRepository,
|
||||||
|
'latest',
|
||||||
|
),
|
||||||
|
containerPort: 3000,
|
||||||
|
enableLogging: true,
|
||||||
|
environment: {
|
||||||
|
// App settings
|
||||||
|
NODE_ENV: process.env.NODE_ENV || 'production',
|
||||||
|
PORT: process.env.PORT || '3000',
|
||||||
|
BASE_URL: process.env.BASE_URL || '',
|
||||||
|
|
||||||
|
// Database connection (CDK provides these automatically)
|
||||||
|
AZURE_POSTGRESQL_HOST: dbCluster.clusterEndpoint.hostname,
|
||||||
|
AZURE_POSTGRESQL_PORT: '5432',
|
||||||
|
AZURE_POSTGRESQL_DATABASE: props?.databaseName || 'postgres',
|
||||||
|
AZURE_POSTGRESQL_USER: 'postgres',
|
||||||
|
AZURE_POSTGRESQL_SSL: process.env.AZURE_POSTGRESQL_SSL || 'false',
|
||||||
|
AZURE_POSTGRESQL_SYNC:
|
||||||
|
process.env.AZURE_POSTGRESQL_SYNC || 'false',
|
||||||
|
|
||||||
|
// JWT Configuration - CRITICAL: These must be set
|
||||||
|
JWT_SECRET:
|
||||||
|
process.env.JWT_SECRET ||
|
||||||
|
'syncrow-jwt-secret-key-2025-production-environment-very-secure-random-string',
|
||||||
|
JWT_SECRET_REFRESH:
|
||||||
|
process.env.JWT_SECRET_REFRESH ||
|
||||||
|
'syncrow-refresh-secret-key-2025-production-environment-different-secure-string',
|
||||||
|
JWT_EXPIRE_TIME: process.env.JWT_EXPIRE_TIME || '1h',
|
||||||
|
JWT_EXPIRE_TIME_REFRESH:
|
||||||
|
process.env.JWT_EXPIRE_TIME_REFRESH || '7d',
|
||||||
|
|
||||||
|
// Firebase Configuration
|
||||||
|
FIREBASE_API_KEY: process.env.FIREBASE_API_KEY || '',
|
||||||
|
FIREBASE_AUTH_DOMAIN: process.env.FIREBASE_AUTH_DOMAIN || '',
|
||||||
|
FIREBASE_PROJECT_ID: process.env.FIREBASE_PROJECT_ID || '',
|
||||||
|
FIREBASE_STORAGE_BUCKET:
|
||||||
|
process.env.FIREBASE_STORAGE_BUCKET || '',
|
||||||
|
FIREBASE_MESSAGING_SENDER_ID:
|
||||||
|
process.env.FIREBASE_MESSAGING_SENDER_ID || '',
|
||||||
|
FIREBASE_APP_ID: process.env.FIREBASE_APP_ID || '',
|
||||||
|
FIREBASE_MEASUREMENT_ID:
|
||||||
|
process.env.FIREBASE_MEASUREMENT_ID || '',
|
||||||
|
FIREBASE_DATABASE_URL: process.env.FIREBASE_DATABASE_URL || '',
|
||||||
|
|
||||||
|
// Tuya IoT Configuration
|
||||||
|
TUYA_EU_URL:
|
||||||
|
process.env.TUYA_EU_URL || 'https://openapi.tuyaeu.com',
|
||||||
|
TUYA_ACCESS_ID: process.env.TUYA_ACCESS_ID || '',
|
||||||
|
TUYA_ACCESS_KEY: process.env.TUYA_ACCESS_KEY || '',
|
||||||
|
TRUN_ON_TUYA_SOCKET: process.env.TRUN_ON_TUYA_SOCKET || '',
|
||||||
|
|
||||||
|
// Email Configuration
|
||||||
|
SMTP_HOST: process.env.SMTP_HOST || '',
|
||||||
|
SMTP_PORT: process.env.SMTP_PORT || '587',
|
||||||
|
SMTP_SECURE: process.env.SMTP_SECURE || 'true',
|
||||||
|
SMTP_USER: process.env.SMTP_USER || '',
|
||||||
|
SMTP_PASSWORD: process.env.SMTP_PASSWORD || '',
|
||||||
|
|
||||||
|
// Mailtrap Configuration
|
||||||
|
MAILTRAP_API_TOKEN: process.env.MAILTRAP_API_TOKEN || '',
|
||||||
|
MAILTRAP_INVITATION_TEMPLATE_UUID:
|
||||||
|
process.env.MAILTRAP_INVITATION_TEMPLATE_UUID || '',
|
||||||
|
MAILTRAP_EDIT_USER_TEMPLATE_UUID:
|
||||||
|
process.env.MAILTRAP_EDIT_USER_TEMPLATE_UUID || '',
|
||||||
|
MAILTRAP_DISABLE_TEMPLATE_UUID:
|
||||||
|
process.env.MAILTRAP_DISABLE_TEMPLATE_UUID || '',
|
||||||
|
MAILTRAP_ENABLE_TEMPLATE_UUID:
|
||||||
|
process.env.MAILTRAP_ENABLE_TEMPLATE_UUID || '',
|
||||||
|
MAILTRAP_DELETE_USER_TEMPLATE_UUID:
|
||||||
|
process.env.MAILTRAP_DELETE_USER_TEMPLATE_UUID || '',
|
||||||
|
|
||||||
|
// OneSignal Push Notifications
|
||||||
|
ONESIGNAL_APP_ID: process.env.ONESIGNAL_APP_ID || '',
|
||||||
|
ONESIGNAL_API_KEY: process.env.ONESIGNAL_API_KEY || '',
|
||||||
|
|
||||||
|
// Admin Configuration
|
||||||
|
SUPER_ADMIN_EMAIL:
|
||||||
|
process.env.SUPER_ADMIN_EMAIL || 'admin@yourdomain.com',
|
||||||
|
SUPER_ADMIN_PASSWORD:
|
||||||
|
process.env.SUPER_ADMIN_PASSWORD ||
|
||||||
|
'YourSecureAdminPassword123!',
|
||||||
|
|
||||||
|
// Google OAuth
|
||||||
|
GOOGLE_CLIENT_ID: process.env.GOOGLE_CLIENT_ID || '',
|
||||||
|
GOOGLE_CLIENT_SECRET: process.env.GOOGLE_CLIENT_SECRET || '',
|
||||||
|
|
||||||
|
// Other Configuration
|
||||||
|
OTP_LIMITER: process.env.OTP_LIMITER || '5',
|
||||||
|
SECRET_KEY:
|
||||||
|
process.env.SECRET_KEY ||
|
||||||
|
'another-random-secret-key-for-general-encryption',
|
||||||
|
ACCESS_KEY: process.env.ACCESS_KEY || '',
|
||||||
|
DB_SYNC: process.env.DB_SYNC || 'txsrue',
|
||||||
|
|
||||||
|
// Redis (used?)
|
||||||
|
AZURE_REDIS_CONNECTIONSTRING:
|
||||||
|
process.env.AZURE_REDIS_CONNECTIONSTRING || '',
|
||||||
|
|
||||||
|
// Docker Registry (for deployment)
|
||||||
|
DOCKER_REGISTRY_SERVER_URL:
|
||||||
|
process.env.DOCKER_REGISTRY_SERVER_URL || '',
|
||||||
|
DOCKER_REGISTRY_SERVER_USERNAME:
|
||||||
|
process.env.DOCKER_REGISTRY_SERVER_USERNAME || '',
|
||||||
|
DOCKER_REGISTRY_SERVER_PASSWORD:
|
||||||
|
process.env.DOCKER_REGISTRY_SERVER_PASSWORD || '',
|
||||||
|
|
||||||
|
// Doppler (if used for secrets management)
|
||||||
|
DOPPLER_PROJECT: process.env.DOPPLER_PROJECT || '',
|
||||||
|
DOPPLER_CONFIG: process.env.DOPPLER_CONFIG || '',
|
||||||
|
DOPPLER_ENVIRONMENT: process.env.DOPPLER_ENVIRONMENT || '',
|
||||||
|
|
||||||
|
// Azure specific
|
||||||
|
WEBSITES_ENABLE_APP_SERVICE_STORAGE:
|
||||||
|
process.env.WEBSITES_ENABLE_APP_SERVICE_STORAGE || 'false',
|
||||||
|
},
|
||||||
|
secrets: {
|
||||||
|
AZURE_POSTGRESQL_PASSWORD: ecs.Secret.fromSecretsManager(
|
||||||
|
dbSecret,
|
||||||
|
'password',
|
||||||
|
),
|
||||||
|
},
|
||||||
|
logDriver: ecs.LogDrivers.awsLogs({
|
||||||
|
streamPrefix: 'syncrow-backend',
|
||||||
|
logGroup,
|
||||||
|
}),
|
||||||
|
},
|
||||||
|
publicLoadBalancer: true,
|
||||||
|
securityGroups: [ecsSecurityGroup],
|
||||||
|
},
|
||||||
|
);
|
||||||
|
|
||||||
|
// Add security group to load balancer after creation
|
||||||
|
fargateService.loadBalancer.addSecurityGroup(albSecurityGroup);
|
||||||
|
|
||||||
|
// Configure health check
|
||||||
|
fargateService.targetGroup.configureHealthCheck({
|
||||||
|
path: '/health',
|
||||||
|
healthyHttpCodes: '200',
|
||||||
|
interval: cdk.Duration.seconds(30),
|
||||||
|
timeout: cdk.Duration.seconds(5),
|
||||||
|
healthyThresholdCount: 2,
|
||||||
|
unhealthyThresholdCount: 3,
|
||||||
|
});
|
||||||
|
|
||||||
|
// Auto Scaling
|
||||||
|
const scalableTarget = fargateService.service.autoScaleTaskCount({
|
||||||
|
minCapacity: 1,
|
||||||
|
maxCapacity: 10,
|
||||||
|
});
|
||||||
|
|
||||||
|
scalableTarget.scaleOnCpuUtilization('CpuScaling', {
|
||||||
|
targetUtilizationPercent: 70,
|
||||||
|
scaleInCooldown: cdk.Duration.minutes(5),
|
||||||
|
scaleOutCooldown: cdk.Duration.minutes(2),
|
||||||
|
});
|
||||||
|
|
||||||
|
scalableTarget.scaleOnMemoryUtilization('MemoryScaling', {
|
||||||
|
targetUtilizationPercent: 80,
|
||||||
|
scaleInCooldown: cdk.Duration.minutes(5),
|
||||||
|
scaleOutCooldown: cdk.Duration.minutes(2),
|
||||||
|
});
|
||||||
|
|
||||||
|
// Grant ECS task access to RDS credentials
|
||||||
|
dbSecret.grantRead(fargateService.taskDefinition.taskRole);
|
||||||
|
|
||||||
|
this.apiUrl = 'https://api.syncos.syncrow.ae';
|
||||||
|
this.databaseEndpoint = dbCluster.clusterEndpoint.hostname;
|
||||||
|
|
||||||
|
// Outputs
|
||||||
|
new cdk.CfnOutput(this, 'ApiUrl', {
|
||||||
|
value: this.apiUrl,
|
||||||
|
description: 'Application Load Balancer URL',
|
||||||
|
exportName: `${this.stackName}-ApiUrl`,
|
||||||
|
});
|
||||||
|
|
||||||
|
new cdk.CfnOutput(this, 'DatabaseEndpoint', {
|
||||||
|
value: this.databaseEndpoint,
|
||||||
|
description: 'RDS Cluster Endpoint',
|
||||||
|
exportName: `${this.stackName}-DatabaseEndpoint`,
|
||||||
|
});
|
||||||
|
|
||||||
|
new cdk.CfnOutput(this, 'EcrRepositoryUri', {
|
||||||
|
value: ecrRepository.repositoryUri,
|
||||||
|
description: 'ECR Repository URI',
|
||||||
|
exportName: `${this.stackName}-EcrRepositoryUri`,
|
||||||
|
});
|
||||||
|
|
||||||
|
new cdk.CfnOutput(this, 'ClusterName', {
|
||||||
|
value: cluster.clusterName,
|
||||||
|
description: 'ECS Cluster Name',
|
||||||
|
exportName: `${this.stackName}-ClusterName`,
|
||||||
|
});
|
||||||
|
|
||||||
|
new cdk.CfnOutput(this, 'ServiceName', {
|
||||||
|
value: fargateService.service.serviceName,
|
||||||
|
description: 'ECS Service Name',
|
||||||
|
exportName: `${this.stackName}-ServiceName`,
|
||||||
|
});
|
||||||
|
}
|
||||||
|
}
|
@@ -1,18 +1,19 @@
+import { PlatformType } from '@app/common/constants/platform-type.enum';
+import { RoleType } from '@app/common/constants/role.type.enum';
+import { UserEntity } from '@app/common/modules/user/entities';
 import {
   BadRequestException,
   Injectable,
   UnauthorizedException,
 } from '@nestjs/common';
+import { ConfigService } from '@nestjs/config';
 import { JwtService } from '@nestjs/jwt';
 import * as argon2 from 'argon2';
-import { HelperHashService } from '../../helper/services';
-import { UserRepository } from '../../../../common/src/modules/user/repositories';
-import { UserSessionRepository } from '../../../../common/src/modules/session/repositories/session.repository';
-import { UserSessionEntity } from '../../../../common/src/modules/session/entities';
-import { ConfigService } from '@nestjs/config';
 import { OAuth2Client } from 'google-auth-library';
-import { PlatformType } from '@app/common/constants/platform-type.enum';
-import { RoleType } from '@app/common/constants/role.type.enum';
+import { UserSessionEntity } from '../../../../common/src/modules/session/entities';
+import { UserSessionRepository } from '../../../../common/src/modules/session/repositories/session.repository';
+import { UserRepository } from '../../../../common/src/modules/user/repositories';
+import { HelperHashService } from '../../helper/services';
 
 @Injectable()
 export class AuthService {
@@ -32,7 +33,7 @@ export class AuthService {
     pass: string,
     regionUuid?: string,
     platform?: PlatformType,
-  ): Promise<any> {
+  ): Promise<Omit<UserEntity, 'password'>> {
     const user = await this.userRepository.findOne({
       where: {
         email,
@@ -40,16 +41,17 @@ export class AuthService {
       },
       relations: ['roleType', 'project'],
     });
-    if (
-      platform === PlatformType.WEB &&
-      (user.roleType.type === RoleType.SPACE_OWNER ||
-        user.roleType.type === RoleType.SPACE_MEMBER)
-    ) {
-      throw new UnauthorizedException('Access denied for web platform');
-    }
     if (!user) {
       throw new BadRequestException('Invalid credentials');
     }
+    if (
+      platform === PlatformType.WEB &&
+      [RoleType.SPACE_OWNER, RoleType.SPACE_MEMBER].includes(
+        user.roleType.type as RoleType,
+      )
+    ) {
+      throw new UnauthorizedException('Access denied for web platform');
+    }
+
     if (!user.isUserVerified) {
       throw new BadRequestException('User is not verified');
@@ -69,8 +71,9 @@ export class AuthService {
     }
 
     // eslint-disable-next-line @typescript-eslint/no-unused-vars
-    const { password, ...result } = user;
-    return result;
+    // const { password, ...result } = user;
+    delete user.password;
+    return user;
   }
 
   async createSession(data): Promise<UserSessionEntity> {
@@ -113,6 +116,7 @@ export class AuthService {
       hasAcceptedWebAgreement: user.hasAcceptedWebAgreement,
       hasAcceptedAppAgreement: user.hasAcceptedAppAgreement,
       project: user?.project,
+      bookingPoints: user?.bookingPoints,
     };
     if (payload.googleCode) {
       const profile = await this.getProfile(payload.googleCode);
@@ -1,12 +1,11 @@
 import { Module } from '@nestjs/common';
+import { ConfigModule } from '@nestjs/config';
+import { ErrorMessageService } from 'src/error-message/error-message.service';
+import { AuthModule } from './auth/auth.module';
 import { CommonService } from './common.service';
+import config from './config';
 import { DatabaseModule } from './database/database.module';
 import { HelperModule } from './helper/helper.module';
-import { AuthModule } from './auth/auth.module';
-import { ConfigModule } from '@nestjs/config';
-import config from './config';
-import { EmailService } from './util/email.service';
-import { ErrorMessageService } from 'src/error-message/error-message.service';
 import { TuyaService } from './integrations/tuya/services/tuya.service';
 import { SceneDeviceRepository } from './modules/scene-device/repositories';
 import { SpaceRepository } from './modules/space';
@@ -15,6 +14,7 @@ import {
   SubspaceModelRepository,
 } from './modules/space-model';
 import { SubspaceRepository } from './modules/space/repositories/subspace.repository';
+import { EmailService } from './util/email/email.service';
 @Module({
   providers: [
     CommonService,
@ -10,6 +10,8 @@ export default registerAs(
|
|||||||
SMTP_USER: process.env.SMTP_USER,
|
SMTP_USER: process.env.SMTP_USER,
|
||||||
SMTP_SENDER: process.env.SMTP_SENDER,
|
SMTP_SENDER: process.env.SMTP_SENDER,
|
||||||
SMTP_PASSWORD: process.env.SMTP_PASSWORD,
|
SMTP_PASSWORD: process.env.SMTP_PASSWORD,
|
||||||
|
BATCH_EMAIL_API_URL: process.env.BATCH_EMAIL_API_URL,
|
||||||
|
SEND_EMAIL_API_URL: process.env.SEND_EMAIL_API_URL,
|
||||||
MAILTRAP_API_TOKEN: process.env.MAILTRAP_API_TOKEN,
|
MAILTRAP_API_TOKEN: process.env.MAILTRAP_API_TOKEN,
|
||||||
MAILTRAP_INVITATION_TEMPLATE_UUID:
|
MAILTRAP_INVITATION_TEMPLATE_UUID:
|
||||||
process.env.MAILTRAP_INVITATION_TEMPLATE_UUID,
|
process.env.MAILTRAP_INVITATION_TEMPLATE_UUID,
|
||||||
@ -21,5 +23,9 @@ export default registerAs(
|
|||||||
process.env.MAILTRAP_EDIT_USER_TEMPLATE_UUID,
|
process.env.MAILTRAP_EDIT_USER_TEMPLATE_UUID,
|
||||||
MAILTRAP_SEND_OTP_TEMPLATE_UUID:
|
MAILTRAP_SEND_OTP_TEMPLATE_UUID:
|
||||||
process.env.MAILTRAP_SEND_OTP_TEMPLATE_UUID,
|
process.env.MAILTRAP_SEND_OTP_TEMPLATE_UUID,
|
||||||
|
MAILTRAP_SEND_BOOKING_AVAILABILITY_UPDATE_TEMPLATE_UUID:
|
||||||
|
process.env.MAILTRAP_SEND_BOOKING_AVAILABILITY_UPDATE_TEMPLATE_UUID,
|
||||||
|
MAILTRAP_SEND_BOOKING_TIMING_UPDATE_TEMPLATE_UUID:
|
||||||
|
process.env.MAILTRAP_SEND_BOOKING_TIMING_UPDATE_TEMPLATE_UUID,
|
||||||
}),
|
}),
|
||||||
);
|
);
|
||||||
|
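The new booking email settings above are read straight from the environment. A minimal hedged sketch of exposing them through `registerAs`; the `'email-config'` namespace is an assumption, not necessarily the repo's actual key:

import { registerAs } from '@nestjs/config';

// Sketch only: mirrors the env variables added in the diff above.
export default registerAs('email-config', () => ({
  SEND_EMAIL_API_URL: process.env.SEND_EMAIL_API_URL,
  BATCH_EMAIL_API_URL: process.env.BATCH_EMAIL_API_URL,
  MAILTRAP_SEND_BOOKING_AVAILABILITY_UPDATE_TEMPLATE_UUID:
    process.env.MAILTRAP_SEND_BOOKING_AVAILABILITY_UPDATE_TEMPLATE_UUID,
  MAILTRAP_SEND_BOOKING_TIMING_UPDATE_TEMPLATE_UUID:
    process.env.MAILTRAP_SEND_BOOKING_TIMING_UPDATE_TEMPLATE_UUID,
}));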
@ -69,7 +69,47 @@ export class ControllerRoute {
|
|||||||
'Retrieve the list of all regions registered in Syncrow.';
|
'Retrieve the list of all regions registered in Syncrow.';
|
||||||
};
|
};
|
||||||
};
|
};
|
||||||
|
static BOOKABLE_SPACES = class {
|
||||||
|
public static readonly ROUTE = 'bookable-spaces';
|
||||||
|
static ACTIONS = class {
|
||||||
|
public static readonly ADD_BOOKABLE_SPACES_SUMMARY =
|
||||||
|
'Add new bookable spaces';
|
||||||
|
|
||||||
|
public static readonly ADD_BOOKABLE_SPACES_DESCRIPTION =
|
||||||
|
'This endpoint allows you to add new bookable spaces by providing the required details.';
|
||||||
|
|
||||||
|
public static readonly GET_ALL_BOOKABLE_SPACES_SUMMARY =
|
||||||
|
'Get all bookable spaces';
|
||||||
|
|
||||||
|
public static readonly GET_ALL_BOOKABLE_SPACES_DESCRIPTION =
|
||||||
|
'This endpoint retrieves all bookable spaces.';
|
||||||
|
|
||||||
|
public static readonly UPDATE_BOOKABLE_SPACES_SUMMARY =
|
||||||
|
'Update existing bookable spaces';
|
||||||
|
|
||||||
|
public static readonly UPDATE_BOOKABLE_SPACES_DESCRIPTION =
|
||||||
|
'This endpoint allows you to update existing bookable spaces by providing the required details.';
|
||||||
|
};
|
||||||
|
};
|
||||||
|
static BOOKING = class {
|
||||||
|
public static readonly ROUTE = 'bookings';
|
||||||
|
static ACTIONS = class {
|
||||||
|
public static readonly ADD_BOOKING_SUMMARY = 'Add new booking';
|
||||||
|
|
||||||
|
public static readonly ADD_BOOKING_DESCRIPTION =
|
||||||
|
'This endpoint allows you to add a new booking by providing the required details.';
|
||||||
|
|
||||||
|
public static readonly GET_ALL_BOOKINGS_SUMMARY = 'Get all bookings';
|
||||||
|
|
||||||
|
public static readonly GET_ALL_BOOKINGS_DESCRIPTION =
|
||||||
|
'This endpoint retrieves all bookings.';
|
||||||
|
|
||||||
|
public static readonly GET_MY_BOOKINGS_SUMMARY = 'Get my bookings';
|
||||||
|
|
||||||
|
public static readonly GET_MY_BOOKINGS_DESCRIPTION =
|
||||||
|
'This endpoint retrieves all bookings for the authenticated user.';
|
||||||
|
};
|
||||||
|
};
|
||||||
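Constants like these are normally fed into the Swagger decorators of the matching controller. A hedged sketch of how the new BOOKING constants might be consumed; the controller name, tag and handler are illustrative, and the ControllerRoute import path is omitted:

import { Body, Controller, Post } from '@nestjs/common';
import { ApiOperation, ApiTags } from '@nestjs/swagger';
// import { ControllerRoute } from '...'; // path omitted in this sketch

@ApiTags('Booking Module') // tag name is an assumption
@Controller(ControllerRoute.BOOKING.ROUTE)
export class BookingController {
  @Post()
  @ApiOperation({
    summary: ControllerRoute.BOOKING.ACTIONS.ADD_BOOKING_SUMMARY,
    description: ControllerRoute.BOOKING.ACTIONS.ADD_BOOKING_DESCRIPTION,
  })
  async addBooking(@Body() dto: unknown): Promise<void> {
    // delegates to a booking service in the real implementation
  }
}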
static COMMUNITY = class {
|
static COMMUNITY = class {
|
||||||
public static readonly ROUTE = '/projects/:projectUuid/communities';
|
public static readonly ROUTE = '/projects/:projectUuid/communities';
|
||||||
static ACTIONS = class {
|
static ACTIONS = class {
|
||||||
@ -199,6 +239,11 @@ export class ControllerRoute {
|
|||||||
public static readonly UPDATE_SPACE_DESCRIPTION =
|
public static readonly UPDATE_SPACE_DESCRIPTION =
|
||||||
'Updates a space by its UUID and community ID. You can update the name, parent space, and other properties. If a parent space is provided and not already a parent, its `isParent` flag will be set to true.';
|
'Updates a space by its UUID and community ID. You can update the name, parent space, and other properties. If a parent space is provided and not already a parent, its `isParent` flag will be set to true.';
|
||||||
|
|
||||||
|
public static readonly UPDATE_CHILDREN_SPACES_ORDER_OF_A_SPACE_SUMMARY =
|
||||||
|
'Update the order of child spaces under a specific parent space';
|
||||||
|
public static readonly UPDATE_CHILDREN_SPACES_ORDER_OF_A_SPACE_DESCRIPTION =
|
||||||
|
'Updates the order of child spaces under a specific parent space. You can provide a new order for the child spaces.';
|
||||||
|
|
||||||
public static readonly GET_HEIRARCHY_SUMMARY = 'Get space hierarchy';
|
public static readonly GET_HEIRARCHY_SUMMARY = 'Get space hierarchy';
|
||||||
public static readonly GET_HEIRARCHY_DESCRIPTION =
|
public static readonly GET_HEIRARCHY_DESCRIPTION =
|
||||||
'This endpoint retrieves the hierarchical structure of spaces under a given space ID. It returns all the child spaces nested within the specified space, organized by their parent-child relationships. ';
|
'This endpoint retrieves the hierarchical structure of spaces under a given space ID. It returns all the child spaces nested within the specified space, organized by their parent-child relationships. ';
|
||||||
@ -397,6 +442,11 @@ export class ControllerRoute {
|
|||||||
public static readonly DELETE_USER_SUMMARY = 'Delete user by UUID';
|
public static readonly DELETE_USER_SUMMARY = 'Delete user by UUID';
|
||||||
public static readonly DELETE_USER_DESCRIPTION =
|
public static readonly DELETE_USER_DESCRIPTION =
|
||||||
'This endpoint deletes a user identified by their UUID. Accessible only by users with the Super Admin role.';
|
'This endpoint deletes a user identified by their UUID. Accessible only by users with the Super Admin role.';
|
||||||
|
|
||||||
|
public static readonly DELETE_USER_PROFILE_SUMMARY =
|
||||||
|
'Delete user profile by UUID';
|
||||||
|
public static readonly DELETE_USER_PROFILE_DESCRIPTION =
|
||||||
|
'This endpoint deletes a user profile identified by their UUID. Accessible only by users with the Super Admin role.';
|
||||||
public static readonly UPDATE_USER_WEB_AGREEMENT_SUMMARY =
|
public static readonly UPDATE_USER_WEB_AGREEMENT_SUMMARY =
|
||||||
'Update user web agreement by user UUID';
|
'Update user web agreement by user UUID';
|
||||||
public static readonly UPDATE_USER_WEB_AGREEMENT_DESCRIPTION =
|
public static readonly UPDATE_USER_WEB_AGREEMENT_DESCRIPTION =
|
||||||
@ -465,7 +515,16 @@ export class ControllerRoute {
|
|||||||
'This endpoint retrieves the terms and conditions for the application.';
|
'This endpoint retrieves the terms and conditions for the application.';
|
||||||
};
|
};
|
||||||
};
|
};
|
||||||
|
static WEATHER = class {
|
||||||
|
public static readonly ROUTE = 'weather';
|
||||||
|
|
||||||
|
static ACTIONS = class {
|
||||||
|
public static readonly FETCH_WEATHER_DETAILS_SUMMARY =
|
||||||
|
'Fetch Weather Details';
|
||||||
|
public static readonly FETCH_WEATHER_DETAILS_DESCRIPTION =
|
||||||
|
'This endpoint retrieves the current weather details, such as temperature and humidity, for a specified location.';
|
||||||
|
};
|
||||||
|
};
|
||||||
static PRIVACY_POLICY = class {
|
static PRIVACY_POLICY = class {
|
||||||
public static readonly ROUTE = 'policy';
|
public static readonly ROUTE = 'policy';
|
||||||
|
|
||||||
@ -492,7 +551,6 @@ export class ControllerRoute {
|
|||||||
};
|
};
|
||||||
static PowerClamp = class {
|
static PowerClamp = class {
|
||||||
public static readonly ROUTE = 'power-clamp';
|
public static readonly ROUTE = 'power-clamp';
|
||||||
|
|
||||||
static ACTIONS = class {
|
static ACTIONS = class {
|
||||||
public static readonly GET_ENERGY_SUMMARY =
|
public static readonly GET_ENERGY_SUMMARY =
|
||||||
'Get power clamp historical data';
|
'Get power clamp historical data';
|
||||||
@ -515,6 +573,20 @@ export class ControllerRoute {
|
|||||||
'This endpoint retrieves the occupancy heat map data based on the provided parameters.';
|
'This endpoint retrieves the occupancy heat map data based on the provided parameters.';
|
||||||
};
|
};
|
||||||
};
|
};
|
||||||
|
static AQI = class {
|
||||||
|
public static readonly ROUTE = 'aqi';
|
||||||
|
|
||||||
|
static ACTIONS = class {
|
||||||
|
public static readonly GET_AQI_RANGE_DATA_SUMMARY = 'Get AQI range data';
|
||||||
|
public static readonly GET_AQI_RANGE_DATA_DESCRIPTION =
|
||||||
|
'This endpoint retrieves the AQI (Air Quality Index) range data based on the provided parameters.';
|
||||||
|
|
||||||
|
public static readonly GET_AQI_DISTRIBUTION_DATA_SUMMARY =
|
||||||
|
'Get AQI distribution data';
|
||||||
|
public static readonly GET_AQI_DISTRIBUTION_DATA_DESCRIPTION =
|
||||||
|
'This endpoint retrieves the AQI (Air Quality Index) distribution data based on the provided parameters.';
|
||||||
|
};
|
||||||
|
};
|
||||||
static DEVICE = class {
|
static DEVICE = class {
|
||||||
public static readonly ROUTE = 'devices';
|
public static readonly ROUTE = 'devices';
|
||||||
|
|
||||||
@ -605,6 +677,11 @@ export class ControllerRoute {
|
|||||||
'Delete scenes by device uuid and switch name';
|
'Delete scenes by device uuid and switch name';
|
||||||
public static readonly DELETE_SCENES_BY_SWITCH_NAME_DESCRIPTION =
|
public static readonly DELETE_SCENES_BY_SWITCH_NAME_DESCRIPTION =
|
||||||
'This endpoint deletes all scenes associated with a specific switch device.';
|
'This endpoint deletes all scenes associated with a specific switch device.';
|
||||||
|
|
||||||
|
public static readonly POPULATE_TUYA_CONST_UUID_SUMMARY =
|
||||||
|
'Populate Tuya const UUID';
|
||||||
|
public static readonly POPULATE_TUYA_CONST_UUID_DESCRIPTION =
|
||||||
|
'This endpoint populates the Tuya const UUID for all devices.';
|
||||||
};
|
};
|
||||||
};
|
};
|
||||||
static DEVICE_COMMISSION = class {
|
static DEVICE_COMMISSION = class {
|
||||||
|
@ -1,3 +0,0 @@
|
|||||||
export const SEND_EMAIL_API_URL_PROD = 'https://send.api.mailtrap.io/api/send/';
|
|
||||||
export const SEND_EMAIL_API_URL_DEV =
|
|
||||||
'https://sandbox.api.mailtrap.io/api/send/2634012';
|
|
8
libs/common/src/constants/pollutants.enum.ts
Normal file
@ -0,0 +1,8 @@
|
|||||||
|
export enum PollutantType {
|
||||||
|
AQI = 'aqi',
|
||||||
|
PM25 = 'pm25',
|
||||||
|
PM10 = 'pm10',
|
||||||
|
VOC = 'voc',
|
||||||
|
CO2 = 'co2',
|
||||||
|
CH2O = 'ch2o',
|
||||||
|
}
|
@ -15,8 +15,10 @@ export enum ProductType {
|
|||||||
WL = 'WL',
|
WL = 'WL',
|
||||||
GD = 'GD',
|
GD = 'GD',
|
||||||
CUR = 'CUR',
|
CUR = 'CUR',
|
||||||
|
CUR_2 = 'CUR_2',
|
||||||
PC = 'PC',
|
PC = 'PC',
|
||||||
FOUR_S = '4S',
|
FOUR_S = '4S',
|
||||||
SIX_S = '6S',
|
SIX_S = '6S',
|
||||||
SOS = 'SOS',
|
SOS = 'SOS',
|
||||||
|
AQI = 'AQI',
|
||||||
}
|
}
|
||||||
|
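Adding `AQI` (and `CUR_2`) to `ProductType` lets the status pipeline route air-quality devices to the new AQI aggregation, as the service changes further down do. A hedged sketch of that per-type fan-out; the simplified updater type and call signatures are assumptions, and the ProductType import is omitted:

// Sketch only: stand-in for the injected power-clamp / occupancy / AQI services.
type HistoricalDataUpdater = { update(deviceUuid: string): Promise<void> };

async function dispatchByProductType(
  productType: ProductType,
  deviceUuid: string,
  services: {
    power: HistoricalDataUpdater;
    occupancy: HistoricalDataUpdater;
    aqi: HistoricalDataUpdater;
  },
): Promise<void> {
  switch (productType) {
    case ProductType.PC:
      await services.power.update(deviceUuid); // energy-consumed history
      break;
    case ProductType.CPS:
    case ProductType.WPS:
      await services.occupancy.update(deviceUuid); // presence / occupancy history
      break;
    case ProductType.AQI:
      await services.aqi.update(deviceUuid); // reachable thanks to the new enum member
      break;
  }
}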
@ -1,51 +1,33 @@
|
|||||||
import { Module } from '@nestjs/common';
|
import { Module } from '@nestjs/common';
|
||||||
import { ConfigModule, ConfigService } from '@nestjs/config';
|
import { ConfigModule, ConfigService } from '@nestjs/config';
|
||||||
import { TypeOrmModule } from '@nestjs/typeorm';
|
import { TypeOrmModule } from '@nestjs/typeorm';
|
||||||
import { SnakeNamingStrategy } from './strategies';
|
|
||||||
import { UserEntity } from '../modules/user/entities/user.entity';
|
|
||||||
import { UserSessionEntity } from '../modules/session/entities/session.entity';
|
|
||||||
import { UserOtpEntity } from '../modules/user/entities';
|
|
||||||
import { ProductEntity } from '../modules/product/entities';
|
|
||||||
import { DeviceEntity } from '../modules/device/entities';
|
import { DeviceEntity } from '../modules/device/entities';
|
||||||
import { PermissionTypeEntity } from '../modules/permission/entities';
|
import { PermissionTypeEntity } from '../modules/permission/entities';
|
||||||
|
import { ProductEntity } from '../modules/product/entities';
|
||||||
|
import { UserSessionEntity } from '../modules/session/entities/session.entity';
|
||||||
|
import { UserOtpEntity } from '../modules/user/entities';
|
||||||
|
import { UserEntity } from '../modules/user/entities/user.entity';
|
||||||
|
import { SnakeNamingStrategy } from './strategies';
|
||||||
|
|
||||||
import { UserSpaceEntity } from '../modules/user/entities';
|
import { TypeOrmWinstonLogger } from '@app/common/logger/services/typeorm.logger';
|
||||||
import { DeviceUserPermissionEntity } from '../modules/device/entities';
|
import { createLogger } from 'winston';
|
||||||
import { RoleTypeEntity } from '../modules/role-type/entities';
|
import { winstonLoggerOptions } from '../logger/services/winston.logger';
|
||||||
import { UserNotificationEntity } from '../modules/user/entities';
|
import { AqiSpaceDailyPollutantStatsEntity } from '../modules/aqi/entities';
|
||||||
import { DeviceNotificationEntity } from '../modules/device/entities';
|
import { AutomationEntity } from '../modules/automation/entities';
|
||||||
import { RegionEntity } from '../modules/region/entities';
|
import { BookableSpaceEntity } from '../modules/booking/entities/bookable-space.entity';
|
||||||
import { TimeZoneEntity } from '../modules/timezone/entities';
|
import { BookingEntity } from '../modules/booking/entities/booking.entity';
|
||||||
import { VisitorPasswordEntity } from '../modules/visitor-password/entities';
|
import { ClientEntity } from '../modules/client/entities';
|
||||||
import { CommunityEntity } from '../modules/community/entities';
|
import { CommunityEntity } from '../modules/community/entities';
|
||||||
import { DeviceStatusLogEntity } from '../modules/device-status-log/entities';
|
import { DeviceStatusLogEntity } from '../modules/device-status-log/entities';
|
||||||
import { SceneEntity, SceneIconEntity } from '../modules/scene/entities';
|
|
||||||
import { SceneDeviceEntity } from '../modules/scene-device/entities';
|
|
||||||
import { ProjectEntity } from '../modules/project/entities';
|
|
||||||
import {
|
import {
|
||||||
SpaceModelEntity,
|
DeviceNotificationEntity,
|
||||||
SubspaceModelEntity,
|
DeviceUserPermissionEntity,
|
||||||
TagModel,
|
} from '../modules/device/entities';
|
||||||
SpaceModelProductAllocationEntity,
|
|
||||||
SubspaceModelProductAllocationEntity,
|
|
||||||
} from '../modules/space-model/entities';
|
|
||||||
import {
|
import {
|
||||||
InviteUserEntity,
|
InviteUserEntity,
|
||||||
InviteUserSpaceEntity,
|
InviteUserSpaceEntity,
|
||||||
} from '../modules/Invite-user/entities';
|
} from '../modules/Invite-user/entities';
|
||||||
import { InviteSpaceEntity } from '../modules/space/entities/invite-space.entity';
|
import { SpaceDailyOccupancyDurationEntity } from '../modules/occupancy/entities';
|
||||||
import { AutomationEntity } from '../modules/automation/entities';
|
|
||||||
import { SpaceProductAllocationEntity } from '../modules/space/entities/space-product-allocation.entity';
|
|
||||||
import { NewTagEntity } from '../modules/tag/entities/tag.entity';
|
|
||||||
import { SpaceEntity } from '../modules/space/entities/space.entity';
|
|
||||||
import { SpaceLinkEntity } from '../modules/space/entities/space-link.entity';
|
|
||||||
import { SubspaceProductAllocationEntity } from '../modules/space/entities/subspace/subspace-product-allocation.entity';
|
|
||||||
import { SubspaceEntity } from '../modules/space/entities/subspace/subspace.entity';
|
|
||||||
import { TagEntity } from '../modules/space/entities/tag.entity';
|
|
||||||
import { ClientEntity } from '../modules/client/entities';
|
|
||||||
import { TypeOrmWinstonLogger } from '@app/common/logger/services/typeorm.logger';
|
|
||||||
import { createLogger } from 'winston';
|
|
||||||
import { winstonLoggerOptions } from '../logger/services/winston.logger';
|
|
||||||
import {
|
import {
|
||||||
PowerClampDailyEntity,
|
PowerClampDailyEntity,
|
||||||
PowerClampHourlyEntity,
|
PowerClampHourlyEntity,
|
||||||
@ -55,6 +37,29 @@ import {
|
|||||||
PresenceSensorDailyDeviceEntity,
|
PresenceSensorDailyDeviceEntity,
|
||||||
PresenceSensorDailySpaceEntity,
|
PresenceSensorDailySpaceEntity,
|
||||||
} from '../modules/presence-sensor/entities';
|
} from '../modules/presence-sensor/entities';
|
||||||
|
import { ProjectEntity } from '../modules/project/entities';
|
||||||
|
import { RegionEntity } from '../modules/region/entities';
|
||||||
|
import { RoleTypeEntity } from '../modules/role-type/entities';
|
||||||
|
import { SceneDeviceEntity } from '../modules/scene-device/entities';
|
||||||
|
import { SceneEntity, SceneIconEntity } from '../modules/scene/entities';
|
||||||
|
import {
|
||||||
|
SpaceModelEntity,
|
||||||
|
SpaceModelProductAllocationEntity,
|
||||||
|
SubspaceModelEntity,
|
||||||
|
SubspaceModelProductAllocationEntity,
|
||||||
|
} from '../modules/space-model/entities';
|
||||||
|
import { InviteSpaceEntity } from '../modules/space/entities/invite-space.entity';
|
||||||
|
import { SpaceProductAllocationEntity } from '../modules/space/entities/space-product-allocation.entity';
|
||||||
|
import { SpaceEntity } from '../modules/space/entities/space.entity';
|
||||||
|
import { SubspaceProductAllocationEntity } from '../modules/space/entities/subspace/subspace-product-allocation.entity';
|
||||||
|
import { SubspaceEntity } from '../modules/space/entities/subspace/subspace.entity';
|
||||||
|
import { NewTagEntity } from '../modules/tag/entities/tag.entity';
|
||||||
|
import { TimeZoneEntity } from '../modules/timezone/entities';
|
||||||
|
import {
|
||||||
|
UserNotificationEntity,
|
||||||
|
UserSpaceEntity,
|
||||||
|
} from '../modules/user/entities';
|
||||||
|
import { VisitorPasswordEntity } from '../modules/visitor-password/entities';
|
||||||
@Module({
|
@Module({
|
||||||
imports: [
|
imports: [
|
||||||
TypeOrmModule.forRootAsync({
|
TypeOrmModule.forRootAsync({
|
||||||
@ -83,9 +88,7 @@ import {
|
|||||||
PermissionTypeEntity,
|
PermissionTypeEntity,
|
||||||
CommunityEntity,
|
CommunityEntity,
|
||||||
SpaceEntity,
|
SpaceEntity,
|
||||||
SpaceLinkEntity,
|
|
||||||
SubspaceEntity,
|
SubspaceEntity,
|
||||||
TagEntity,
|
|
||||||
UserSpaceEntity,
|
UserSpaceEntity,
|
||||||
DeviceUserPermissionEntity,
|
DeviceUserPermissionEntity,
|
||||||
RoleTypeEntity,
|
RoleTypeEntity,
|
||||||
@ -100,7 +103,6 @@ import {
|
|||||||
SceneDeviceEntity,
|
SceneDeviceEntity,
|
||||||
SpaceModelEntity,
|
SpaceModelEntity,
|
||||||
SubspaceModelEntity,
|
SubspaceModelEntity,
|
||||||
TagModel,
|
|
||||||
InviteUserEntity,
|
InviteUserEntity,
|
||||||
InviteUserSpaceEntity,
|
InviteUserSpaceEntity,
|
||||||
InviteSpaceEntity,
|
InviteSpaceEntity,
|
||||||
@ -115,6 +117,10 @@ import {
|
|||||||
PowerClampMonthlyEntity,
|
PowerClampMonthlyEntity,
|
||||||
PresenceSensorDailyDeviceEntity,
|
PresenceSensorDailyDeviceEntity,
|
||||||
PresenceSensorDailySpaceEntity,
|
PresenceSensorDailySpaceEntity,
|
||||||
|
AqiSpaceDailyPollutantStatsEntity,
|
||||||
|
SpaceDailyOccupancyDurationEntity,
|
||||||
|
BookableSpaceEntity,
|
||||||
|
BookingEntity,
|
||||||
],
|
],
|
||||||
namingStrategy: new SnakeNamingStrategy(),
|
namingStrategy: new SnakeNamingStrategy(),
|
||||||
synchronize: Boolean(JSON.parse(configService.get('DB_SYNC'))),
|
synchronize: Boolean(JSON.parse(configService.get('DB_SYNC'))),
|
||||||
@ -123,8 +129,8 @@ import {
|
|||||||
logger: typeOrmLogger,
|
logger: typeOrmLogger,
|
||||||
extra: {
|
extra: {
|
||||||
charset: 'utf8mb4',
|
charset: 'utf8mb4',
|
||||||
max: 20, // set pool max size
|
max: 100, // set pool max size
|
||||||
idleTimeoutMillis: 5000, // close idle clients after 5 second
|
idleTimeoutMillis: 3000, // close idle clients after 3 seconds
|
||||||
connectionTimeoutMillis: 12_000, // return an error after 12 seconds if a connection could not be established
|
connectionTimeoutMillis: 12_000, // return an error after 12 seconds if a connection could not be established
|
||||||
maxUses: 7500, // close (and replace) a connection after it has been used 7500 times (see below for discussion)
|
maxUses: 7500, // close (and replace) a connection after it has been used 7500 times (see below for discussion)
|
||||||
},
|
},
|
||||||
|
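The `extra` object above is handed straight to the underlying node-postgres pool. A small sketch of the same settings in isolation, with comments matching the values actually set:

// Sketch only: options forwarded to pg.Pool through TypeORM's `extra` block.
const poolExtra = {
  charset: 'utf8mb4',
  max: 100, // pool max size
  idleTimeoutMillis: 3000, // close idle clients after 3 seconds
  connectionTimeoutMillis: 12_000, // fail after 12 seconds if no connection could be established
  maxUses: 7500, // recycle a connection after it has been used 7500 times
};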
@ -1,10 +1,9 @@
|
|||||||
import { IsBoolean, IsDate, IsOptional } from 'class-validator';
|
|
||||||
import { IsPageRequestParam } from '../validators/is-page-request-param.validator';
|
|
||||||
import { ApiProperty } from '@nestjs/swagger';
|
import { ApiProperty } from '@nestjs/swagger';
|
||||||
import { IsSizeRequestParam } from '../validators/is-size-request-param.validator';
|
|
||||||
import { Transform } from 'class-transformer';
|
import { Transform } from 'class-transformer';
|
||||||
import { parseToDate } from '../util/parseToDate';
|
import { IsBoolean, IsOptional } from 'class-validator';
|
||||||
import { BooleanValues } from '../constants/boolean-values.enum';
|
import { BooleanValues } from '../constants/boolean-values.enum';
|
||||||
|
import { IsPageRequestParam } from '../validators/is-page-request-param.validator';
|
||||||
|
import { IsSizeRequestParam } from '../validators/is-size-request-param.validator';
|
||||||
|
|
||||||
export class PaginationRequestGetListDto {
|
export class PaginationRequestGetListDto {
|
||||||
@ApiProperty({
|
@ApiProperty({
|
||||||
@ -19,6 +18,7 @@ export class PaginationRequestGetListDto {
|
|||||||
return value.obj.includeSpaces === BooleanValues.TRUE;
|
return value.obj.includeSpaces === BooleanValues.TRUE;
|
||||||
})
|
})
|
||||||
public includeSpaces?: boolean = false;
|
public includeSpaces?: boolean = false;
|
||||||
|
|
||||||
@IsOptional()
|
@IsOptional()
|
||||||
@IsPageRequestParam({
|
@IsPageRequestParam({
|
||||||
message: 'Page must be bigger than 0',
|
message: 'Page must be bigger than 0',
|
||||||
@ -40,40 +40,4 @@ export class PaginationRequestGetListDto {
|
|||||||
description: 'Size request',
|
description: 'Size request',
|
||||||
})
|
})
|
||||||
size?: number;
|
size?: number;
|
||||||
|
|
||||||
@IsOptional()
|
|
||||||
@ApiProperty({
|
|
||||||
name: 'name',
|
|
||||||
required: false,
|
|
||||||
description: 'Name to be filtered',
|
|
||||||
})
|
|
||||||
name?: string;
|
|
||||||
|
|
||||||
@ApiProperty({
|
|
||||||
name: 'from',
|
|
||||||
required: false,
|
|
||||||
type: Number,
|
|
||||||
description: `Start time in UNIX timestamp format to filter`,
|
|
||||||
example: 1674172800000,
|
|
||||||
})
|
|
||||||
@IsOptional()
|
|
||||||
@Transform(({ value }) => parseToDate(value))
|
|
||||||
@IsDate({
|
|
||||||
message: `From must be in UNIX timestamp format in order to parse to Date instance`,
|
|
||||||
})
|
|
||||||
from?: Date;
|
|
||||||
|
|
||||||
@ApiProperty({
|
|
||||||
name: 'to',
|
|
||||||
required: false,
|
|
||||||
type: Number,
|
|
||||||
description: `End time in UNIX timestamp format to filter`,
|
|
||||||
example: 1674259200000,
|
|
||||||
})
|
|
||||||
@IsOptional()
|
|
||||||
@Transform(({ value }) => parseToDate(value))
|
|
||||||
@IsDate({
|
|
||||||
message: `To must be in UNIX timestamp format in order to parse to Date instance`,
|
|
||||||
})
|
|
||||||
to?: Date;
|
|
||||||
}
|
}
|
||||||
|
@ -3,26 +3,12 @@ import { DeviceStatusFirebaseController } from './controllers/devices-status.con
|
|||||||
import { DeviceStatusFirebaseService } from './services/devices-status.service';
|
import { DeviceStatusFirebaseService } from './services/devices-status.service';
|
||||||
import { DeviceRepository } from '@app/common/modules/device/repositories';
|
import { DeviceRepository } from '@app/common/modules/device/repositories';
|
||||||
import { DeviceStatusLogRepository } from '@app/common/modules/device-status-log/repositories/device-status.repository';
|
import { DeviceStatusLogRepository } from '@app/common/modules/device-status-log/repositories/device-status.repository';
|
||||||
import { PowerClampService } from '@app/common/helper/services/power.clamp.service';
|
|
||||||
import {
|
|
||||||
PowerClampHourlyRepository,
|
|
||||||
PowerClampDailyRepository,
|
|
||||||
PowerClampMonthlyRepository,
|
|
||||||
} from '@app/common/modules/power-clamp/repositories';
|
|
||||||
import { SqlLoaderService } from '@app/common/helper/services/sql-loader.service';
|
|
||||||
import { OccupancyService } from '@app/common/helper/services/occupancy.service';
|
|
||||||
|
|
||||||
@Module({
|
@Module({
|
||||||
providers: [
|
providers: [
|
||||||
DeviceStatusFirebaseService,
|
DeviceStatusFirebaseService,
|
||||||
DeviceRepository,
|
DeviceRepository,
|
||||||
DeviceStatusLogRepository,
|
DeviceStatusLogRepository,
|
||||||
PowerClampService,
|
|
||||||
PowerClampHourlyRepository,
|
|
||||||
PowerClampDailyRepository,
|
|
||||||
PowerClampMonthlyRepository,
|
|
||||||
SqlLoaderService,
|
|
||||||
OccupancyService,
|
|
||||||
],
|
],
|
||||||
controllers: [DeviceStatusFirebaseController],
|
controllers: [DeviceStatusFirebaseController],
|
||||||
exports: [DeviceStatusFirebaseService, DeviceStatusLogRepository],
|
exports: [DeviceStatusFirebaseService, DeviceStatusLogRepository],
|
||||||
|
@ -13,6 +13,7 @@ class StatusDto {
|
|||||||
|
|
||||||
@IsNotEmpty()
|
@IsNotEmpty()
|
||||||
value: any;
|
value: any;
|
||||||
|
t?: string | number | Date;
|
||||||
}
|
}
|
||||||
|
|
||||||
export class AddDeviceStatusDto {
|
export class AddDeviceStatusDto {
|
||||||
|
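The new optional `t` on `StatusDto` may arrive as epoch milliseconds, a string or a `Date`, which is why the service later reads `property.time` in development and `property.t` in production. A hedged sketch of one way to normalise it; the fallback-to-now behaviour is an assumption:

// Sketch only: coerce the mixed-type event timestamp into an ISO string.
function toIsoTimestamp(t: string | number | Date | undefined): string {
  if (t === undefined) {
    return new Date().toISOString(); // assumed fallback when no timestamp is supplied
  }
  const parsed = typeof t === 'string' && /^\d+$/.test(t) ? Number(t) : t;
  return new Date(parsed).toISOString();
}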
@ -1,15 +1,13 @@
|
|||||||
|
import { DeviceStatusLogRepository } from '@app/common/modules/device-status-log/repositories';
|
||||||
|
import { DeviceRepository } from '@app/common/modules/device/repositories';
|
||||||
import {
|
import {
|
||||||
HttpException,
|
HttpException,
|
||||||
HttpStatus,
|
HttpStatus,
|
||||||
Injectable,
|
Injectable,
|
||||||
NotFoundException,
|
NotFoundException,
|
||||||
} from '@nestjs/common';
|
} from '@nestjs/common';
|
||||||
import { AddDeviceStatusDto } from '../dtos/add.devices-status.dto';
|
|
||||||
import { DeviceRepository } from '@app/common/modules/device/repositories';
|
|
||||||
import { GetDeviceDetailsFunctionsStatusInterface } from 'src/device/interfaces/get.device.interface';
|
|
||||||
import { TuyaContext } from '@tuya/tuya-connector-nodejs';
|
|
||||||
import { ConfigService } from '@nestjs/config';
|
import { ConfigService } from '@nestjs/config';
|
||||||
import { firebaseDataBase } from '../../firebase.config';
|
import { TuyaContext } from '@tuya/tuya-connector-nodejs';
|
||||||
import {
|
import {
|
||||||
Database,
|
Database,
|
||||||
DataSnapshot,
|
DataSnapshot,
|
||||||
@ -17,21 +15,18 @@ import {
|
|||||||
ref,
|
ref,
|
||||||
runTransaction,
|
runTransaction,
|
||||||
} from 'firebase/database';
|
} from 'firebase/database';
|
||||||
import { DeviceStatusLogRepository } from '@app/common/modules/device-status-log/repositories';
|
import { GetDeviceDetailsFunctionsStatusInterface } from 'src/device/interfaces/get.device.interface';
|
||||||
import { ProductType } from '@app/common/constants/product-type.enum';
|
import { firebaseDataBase } from '../../firebase.config';
|
||||||
import { PowerClampService } from '@app/common/helper/services/power.clamp.service';
|
import { AddDeviceStatusDto } from '../dtos/add.devices-status.dto';
|
||||||
import { PowerClampEnergyEnum } from '@app/common/constants/power.clamp.enargy.enum';
|
|
||||||
import { PresenceSensorEnum } from '@app/common/constants/presence.sensor.enum';
|
|
||||||
import { OccupancyService } from '@app/common/helper/services/occupancy.service';
|
|
||||||
@Injectable()
|
@Injectable()
|
||||||
export class DeviceStatusFirebaseService {
|
export class DeviceStatusFirebaseService {
|
||||||
private tuya: TuyaContext;
|
private tuya: TuyaContext;
|
||||||
private firebaseDb: Database;
|
private firebaseDb: Database;
|
||||||
|
private readonly isDevEnv: boolean;
|
||||||
|
|
||||||
constructor(
|
constructor(
|
||||||
private readonly configService: ConfigService,
|
private readonly configService: ConfigService,
|
||||||
private readonly deviceRepository: DeviceRepository,
|
private readonly deviceRepository: DeviceRepository,
|
||||||
private readonly powerClampService: PowerClampService,
|
|
||||||
private readonly occupancyService: OccupancyService,
|
|
||||||
private deviceStatusLogRepository: DeviceStatusLogRepository,
|
private deviceStatusLogRepository: DeviceStatusLogRepository,
|
||||||
) {
|
) {
|
||||||
const accessKey = this.configService.get<string>('auth-config.ACCESS_KEY');
|
const accessKey = this.configService.get<string>('auth-config.ACCESS_KEY');
|
||||||
@ -44,7 +39,14 @@ export class DeviceStatusFirebaseService {
|
|||||||
});
|
});
|
||||||
|
|
||||||
// Initialize firebaseDb using firebaseDataBase function
|
// Initialize firebaseDb using firebaseDataBase function
|
||||||
this.firebaseDb = firebaseDataBase(this.configService);
|
try {
|
||||||
|
this.firebaseDb = firebaseDataBase(this.configService);
|
||||||
|
} catch (error) {
|
||||||
|
console.warn('Firebase initialization failed, continuing without Firebase:', error.message);
|
||||||
|
this.firebaseDb = null;
|
||||||
|
}
|
||||||
|
this.isDevEnv =
|
||||||
|
this.configService.get<string>('NODE_ENV') === 'development';
|
||||||
}
|
}
|
||||||
async addDeviceStatusByDeviceUuid(
|
async addDeviceStatusByDeviceUuid(
|
||||||
deviceTuyaUuid: string,
|
deviceTuyaUuid: string,
|
||||||
@ -59,7 +61,7 @@ export class DeviceStatusFirebaseService {
|
|||||||
const deviceStatusSaved = await this.createDeviceStatusFirebase({
|
const deviceStatusSaved = await this.createDeviceStatusFirebase({
|
||||||
deviceUuid: device.uuid,
|
deviceUuid: device.uuid,
|
||||||
deviceTuyaUuid: deviceTuyaUuid,
|
deviceTuyaUuid: deviceTuyaUuid,
|
||||||
status: deviceStatus.status,
|
status: deviceStatus?.status,
|
||||||
productUuid: deviceStatus.productUuid,
|
productUuid: deviceStatus.productUuid,
|
||||||
productType: deviceStatus.productType,
|
productType: deviceStatus.productType,
|
||||||
});
|
});
|
||||||
@ -74,25 +76,107 @@ export class DeviceStatusFirebaseService {
|
|||||||
);
|
);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
async addDeviceStatusToFirebase(
|
async addBatchDeviceStatusToOurDb(
|
||||||
addDeviceStatusDto: AddDeviceStatusDto,
|
batch: {
|
||||||
): Promise<AddDeviceStatusDto | null> {
|
deviceTuyaUuid: string;
|
||||||
try {
|
status: any;
|
||||||
const device = await this.getDeviceByDeviceTuyaUuid(
|
log: any;
|
||||||
addDeviceStatusDto.deviceTuyaUuid,
|
device: any;
|
||||||
|
}[],
|
||||||
|
): Promise<void> {
|
||||||
|
console.log(`🔁 Preparing logs from batch of ${batch.length} items...`);
|
||||||
|
|
||||||
|
const allLogs = [];
|
||||||
|
|
||||||
|
for (const item of batch) {
|
||||||
|
const device = item.device;
|
||||||
|
|
||||||
|
if (!device?.uuid) {
|
||||||
|
console.log(`⛔ Skipped unknown device: ${item.deviceTuyaUuid}`);
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Determine properties based on environment
|
||||||
|
const properties =
|
||||||
|
this.isDevEnv && Array.isArray(item.log?.properties)
|
||||||
|
? item.log.properties
|
||||||
|
: Array.isArray(item.status)
|
||||||
|
? item.status
|
||||||
|
: null;
|
||||||
|
|
||||||
|
if (!properties) {
|
||||||
|
console.log(
|
||||||
|
`⛔ Skipped invalid status/properties for device: ${item.deviceTuyaUuid}`,
|
||||||
|
);
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
|
||||||
|
const logs = properties.map((property) =>
|
||||||
|
this.deviceStatusLogRepository.create({
|
||||||
|
deviceId: device.uuid,
|
||||||
|
deviceTuyaId: item.deviceTuyaUuid,
|
||||||
|
productId: device.productDevice?.uuid,
|
||||||
|
log: item.log,
|
||||||
|
code: property.code,
|
||||||
|
value: property.value,
|
||||||
|
eventId: item.log?.dataId,
|
||||||
|
eventTime: new Date(
|
||||||
|
this.isDevEnv ? property.time : property.t,
|
||||||
|
).toISOString(),
|
||||||
|
}),
|
||||||
);
|
);
|
||||||
|
|
||||||
|
allLogs.push(...logs);
|
||||||
|
}
|
||||||
|
|
||||||
|
console.log(`📝 Total logs to insert: ${allLogs.length}`);
|
||||||
|
|
||||||
|
const chunkSize = 300;
|
||||||
|
let insertedCount = 0;
|
||||||
|
|
||||||
|
for (let i = 0; i < allLogs.length; i += chunkSize) {
|
||||||
|
const chunk = allLogs.slice(i, i + chunkSize);
|
||||||
|
try {
|
||||||
|
const result = await this.deviceStatusLogRepository
|
||||||
|
.createQueryBuilder()
|
||||||
|
.insert()
|
||||||
|
.into('device-status-log')
|
||||||
|
.values(chunk)
|
||||||
|
.orIgnore()
|
||||||
|
.execute();
|
||||||
|
|
||||||
|
insertedCount += result.identifiers.length;
|
||||||
|
console.log(
|
||||||
|
`✅ Inserted ${result.identifiers.length} / ${chunk.length} logs (chunk)`,
|
||||||
|
);
|
||||||
|
} catch (error) {
|
||||||
|
console.error('❌ Insert error (skipped chunk):', error.message);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
console.log(`✅ Total logs inserted: ${insertedCount} / ${allLogs.length}`);
|
||||||
|
}
|
||||||
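`addBatchDeviceStatusToOurDb` slices the accumulated logs into chunks of 300 and inserts each chunk with `.orIgnore()`, so duplicate event IDs are skipped instead of failing the whole batch. A generic hedged sketch of the chunking step on its own; the repository in the usage comment is assumed:

// Sketch only: fixed-size chunking as used for the 300-row inserts above.
function chunkArray<T>(items: T[], chunkSize: number): T[][] {
  const chunks: T[][] = [];
  for (let i = 0; i < items.length; i += chunkSize) {
    chunks.push(items.slice(i, i + chunkSize));
  }
  return chunks;
}

// Possible usage, mirroring the loop above:
// for (const chunk of chunkArray(allLogs, 300)) {
//   await repo.createQueryBuilder().insert().into('device-status-log')
//     .values(chunk).orIgnore().execute();
// }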
|
|
||||||
|
async addDeviceStatusToFirebase(
|
||||||
|
addDeviceStatusDto: AddDeviceStatusDto & { device?: any },
|
||||||
|
): Promise<AddDeviceStatusDto | null> {
|
||||||
|
try {
|
||||||
|
let device = addDeviceStatusDto.device;
|
||||||
|
if (!device) {
|
||||||
|
device = await this.getDeviceByDeviceTuyaUuid(
|
||||||
|
addDeviceStatusDto.deviceTuyaUuid,
|
||||||
|
);
|
||||||
|
}
|
||||||
if (device?.uuid) {
|
if (device?.uuid) {
|
||||||
return await this.createDeviceStatusFirebase({
|
return await this.createDeviceStatusFirebase({
|
||||||
deviceUuid: device.uuid,
|
deviceUuid: device.uuid,
|
||||||
...addDeviceStatusDto,
|
...addDeviceStatusDto,
|
||||||
productType: device.productDevice.prodType,
|
productType: device.productDevice?.prodType,
|
||||||
});
|
});
|
||||||
}
|
}
|
||||||
// Return null if device not found or no UUID
|
// Return null if device not found or no UUID
|
||||||
return null;
|
return null;
|
||||||
} catch (error) {
|
} catch (error) {
|
||||||
// Handle the error silently, perhaps log it internally or ignore it
|
|
||||||
return null;
|
return null;
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@ -106,6 +190,15 @@ export class DeviceStatusFirebaseService {
|
|||||||
relations: ['productDevice'],
|
relations: ['productDevice'],
|
||||||
});
|
});
|
||||||
}
|
}
|
||||||
|
async getAllDevices() {
|
||||||
|
return await this.deviceRepository.find({
|
||||||
|
where: {
|
||||||
|
isActive: true,
|
||||||
|
},
|
||||||
|
relations: ['productDevice'],
|
||||||
|
});
|
||||||
|
}
|
||||||
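`getAllDevices` pairs with `addBatchDeviceStatusToOurDb`: a poller can load every active device once, attach the matching device to each incoming Tuya message, and hand the whole batch to the bulk writer (which skips entries with no device). A hedged sketch of that flow; the poller itself is not part of this diff and the `deviceTuyaUuid` column name is assumed:

// Sketch only: one possible caller of the two new methods.
async function flushStatusBatch(
  service: DeviceStatusFirebaseService, // the service shown in this diff
  messages: { deviceTuyaUuid: string; status: any; log: any }[],
): Promise<void> {
  const devices = await service.getAllDevices(); // active devices with productDevice loaded
  const byTuyaId = new Map(devices.map((d) => [d.deviceTuyaUuid, d] as const));

  const batch = messages.map((m) => ({
    ...m,
    device: byTuyaId.get(m.deviceTuyaUuid), // undefined entries are skipped by the writer
  }));

  await service.addBatchDeviceStatusToOurDb(batch);
}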
|
|
||||||
async getDevicesInstructionStatus(deviceUuid: string) {
|
async getDevicesInstructionStatus(deviceUuid: string) {
|
||||||
try {
|
try {
|
||||||
const deviceDetails = await this.getDeviceByDeviceUuid(deviceUuid);
|
const deviceDetails = await this.getDeviceByDeviceUuid(deviceUuid);
|
||||||
@ -120,7 +213,7 @@ export class DeviceStatusFirebaseService {
|
|||||||
return {
|
return {
|
||||||
productUuid: deviceDetails.productDevice.uuid,
|
productUuid: deviceDetails.productDevice.uuid,
|
||||||
productType: deviceDetails.productDevice.prodType,
|
productType: deviceDetails.productDevice.prodType,
|
||||||
status: deviceStatus.result[0].status,
|
status: deviceStatus.result[0]?.status,
|
||||||
};
|
};
|
||||||
} catch (error) {
|
} catch (error) {
|
||||||
throw new HttpException(
|
throw new HttpException(
|
||||||
@ -164,6 +257,14 @@ export class DeviceStatusFirebaseService {
|
|||||||
async createDeviceStatusFirebase(
|
async createDeviceStatusFirebase(
|
||||||
addDeviceStatusDto: AddDeviceStatusDto,
|
addDeviceStatusDto: AddDeviceStatusDto,
|
||||||
): Promise<any> {
|
): Promise<any> {
|
||||||
|
// Check if Firebase is available
|
||||||
|
if (!this.firebaseDb) {
|
||||||
|
console.warn('Firebase not available, skipping Firebase operations');
|
||||||
|
// Still process the database logs but skip Firebase operations
|
||||||
|
await this.processDeviceStatusLogs(addDeviceStatusDto);
|
||||||
|
return { message: 'Device status processed without Firebase' };
|
||||||
|
}
|
||||||
|
|
||||||
const dataRef = ref(
|
const dataRef = ref(
|
||||||
this.firebaseDb,
|
this.firebaseDb,
|
||||||
`device-status/${addDeviceStatusDto.deviceUuid}`,
|
`device-status/${addDeviceStatusDto.deviceUuid}`,
|
||||||
@ -185,18 +286,18 @@ export class DeviceStatusFirebaseService {
|
|||||||
if (!existingData.productType) {
|
if (!existingData.productType) {
|
||||||
existingData.productType = addDeviceStatusDto.productType;
|
existingData.productType = addDeviceStatusDto.productType;
|
||||||
}
|
}
|
||||||
if (!existingData.status) {
|
if (!existingData?.status) {
|
||||||
existingData.status = [];
|
existingData.status = [];
|
||||||
}
|
}
|
||||||
|
|
||||||
// Create a map to track existing status codes
|
// Create a map to track existing status codes
|
||||||
const statusMap = new Map(
|
const statusMap = new Map(
|
||||||
existingData.status.map((item) => [item.code, item.value]),
|
existingData?.status.map((item) => [item.code, item.value]),
|
||||||
);
|
);
|
||||||
|
|
||||||
// Update or add status codes
|
// Update or add status codes
|
||||||
|
|
||||||
for (const statusItem of addDeviceStatusDto.status) {
|
for (const statusItem of addDeviceStatusDto?.status) {
|
||||||
statusMap.set(statusItem.code, statusItem.value);
|
statusMap.set(statusItem.code, statusItem.value);
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -209,62 +310,131 @@ export class DeviceStatusFirebaseService {
|
|||||||
return existingData;
|
return existingData;
|
||||||
});
|
});
|
||||||
|
|
||||||
// Save logs to your repository
|
|
||||||
const newLogs = addDeviceStatusDto.log.properties.map((property) => {
|
|
||||||
return this.deviceStatusLogRepository.create({
|
|
||||||
deviceId: addDeviceStatusDto.deviceUuid,
|
|
||||||
deviceTuyaId: addDeviceStatusDto.deviceTuyaUuid,
|
|
||||||
productId: addDeviceStatusDto.log.productId,
|
|
||||||
log: addDeviceStatusDto.log,
|
|
||||||
code: property.code,
|
|
||||||
value: property.value,
|
|
||||||
eventId: addDeviceStatusDto.log.dataId,
|
|
||||||
eventTime: new Date(property.time).toISOString(),
|
|
||||||
});
|
|
||||||
});
|
|
||||||
await this.deviceStatusLogRepository.save(newLogs);
|
|
||||||
|
|
||||||
if (addDeviceStatusDto.productType === ProductType.PC) {
|
|
||||||
const energyCodes = new Set([
|
|
||||||
PowerClampEnergyEnum.ENERGY_CONSUMED,
|
|
||||||
PowerClampEnergyEnum.ENERGY_CONSUMED_A,
|
|
||||||
PowerClampEnergyEnum.ENERGY_CONSUMED_B,
|
|
||||||
PowerClampEnergyEnum.ENERGY_CONSUMED_C,
|
|
||||||
]);
|
|
||||||
|
|
||||||
const energyStatus = addDeviceStatusDto?.log?.properties?.find((status) =>
|
|
||||||
energyCodes.has(status.code),
|
|
||||||
);
|
|
||||||
|
|
||||||
if (energyStatus) {
|
|
||||||
await this.powerClampService.updateEnergyConsumedHistoricalData(
|
|
||||||
addDeviceStatusDto.deviceUuid,
|
|
||||||
);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
if (
|
|
||||||
addDeviceStatusDto.productType === ProductType.CPS ||
|
|
||||||
addDeviceStatusDto.productType === ProductType.WPS
|
|
||||||
) {
|
|
||||||
const occupancyCodes = new Set([PresenceSensorEnum.PRESENCE_STATE]);
|
|
||||||
|
|
||||||
const occupancyStatus = addDeviceStatusDto?.log?.properties?.find(
|
|
||||||
(status) => occupancyCodes.has(status.code),
|
|
||||||
);
|
|
||||||
|
|
||||||
if (occupancyStatus) {
|
|
||||||
await this.occupancyService.updateOccupancySensorHistoricalData(
|
|
||||||
addDeviceStatusDto.deviceUuid,
|
|
||||||
);
|
|
||||||
await this.occupancyService.updateOccupancySensorHistoricalDurationData(
|
|
||||||
addDeviceStatusDto.deviceUuid,
|
|
||||||
);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// Return the updated data
|
// Return the updated data
|
||||||
const snapshot: DataSnapshot = await get(dataRef);
|
const snapshot: DataSnapshot = await get(dataRef);
|
||||||
return snapshot.val();
|
return snapshot.val();
|
||||||
}
|
}
|
||||||
|
|
||||||
|
private async processDeviceStatusLogs(addDeviceStatusDto: AddDeviceStatusDto): Promise<void> {
|
||||||
|
if (this.isDevEnv) {
|
||||||
|
// Save logs to your repository
|
||||||
|
const newLogs = addDeviceStatusDto.log.properties.map((property) => {
|
||||||
|
return this.deviceStatusLogRepository.create({
|
||||||
|
deviceId: addDeviceStatusDto.deviceUuid,
|
||||||
|
deviceTuyaId: addDeviceStatusDto.deviceTuyaUuid,
|
||||||
|
productId: addDeviceStatusDto.log.productId,
|
||||||
|
log: addDeviceStatusDto.log,
|
||||||
|
code: property.code,
|
||||||
|
value: property.value,
|
||||||
|
eventId: addDeviceStatusDto.log.dataId,
|
||||||
|
eventTime: new Date(property.time).toISOString(),
|
||||||
|
});
|
||||||
|
});
|
||||||
|
await this.deviceStatusLogRepository.save(newLogs);
|
||||||
|
|
||||||
|
if (addDeviceStatusDto.productType === ProductType.PC) {
|
||||||
|
const energyCodes = new Set([
|
||||||
|
PowerClampEnergyEnum.ENERGY_CONSUMED,
|
||||||
|
PowerClampEnergyEnum.ENERGY_CONSUMED_A,
|
||||||
|
PowerClampEnergyEnum.ENERGY_CONSUMED_B,
|
||||||
|
PowerClampEnergyEnum.ENERGY_CONSUMED_C,
|
||||||
|
]);
|
||||||
|
|
||||||
|
const energyStatus = addDeviceStatusDto?.log?.properties?.find(
|
||||||
|
(status) => energyCodes.has(status.code),
|
||||||
|
);
|
||||||
|
|
||||||
|
if (energyStatus) {
|
||||||
|
await this.powerClampService.updateEnergyConsumedHistoricalData(
|
||||||
|
addDeviceStatusDto.deviceUuid,
|
||||||
|
);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if (
|
||||||
|
addDeviceStatusDto.productType === ProductType.CPS ||
|
||||||
|
addDeviceStatusDto.productType === ProductType.WPS
|
||||||
|
) {
|
||||||
|
const occupancyCodes = new Set([PresenceSensorEnum.PRESENCE_STATE]);
|
||||||
|
|
||||||
|
const occupancyStatus = addDeviceStatusDto?.log?.properties?.find(
|
||||||
|
(status) => occupancyCodes.has(status.code),
|
||||||
|
);
|
||||||
|
|
||||||
|
if (occupancyStatus) {
|
||||||
|
await this.occupancyService.updateOccupancySensorHistoricalData(
|
||||||
|
addDeviceStatusDto.deviceUuid,
|
||||||
|
);
|
||||||
|
await this.occupancyService.updateOccupancySensorHistoricalDurationData(
|
||||||
|
addDeviceStatusDto.deviceUuid,
|
||||||
|
);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
if (addDeviceStatusDto.productType === ProductType.AQI) {
|
||||||
|
await this.aqiDataService.updateAQISensorHistoricalData(
|
||||||
|
addDeviceStatusDto.deviceUuid,
|
||||||
|
);
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
// Save logs to your repository
|
||||||
|
const newLogs = addDeviceStatusDto?.status.map((property) => {
|
||||||
|
return this.deviceStatusLogRepository.create({
|
||||||
|
deviceId: addDeviceStatusDto.deviceUuid,
|
||||||
|
deviceTuyaId: addDeviceStatusDto.deviceTuyaUuid,
|
||||||
|
productId: addDeviceStatusDto.log.productKey,
|
||||||
|
log: addDeviceStatusDto.log,
|
||||||
|
code: property.code,
|
||||||
|
value: property.value,
|
||||||
|
eventId: addDeviceStatusDto.log.dataId,
|
||||||
|
eventTime: new Date(property.t).toISOString(),
|
||||||
|
});
|
||||||
|
});
|
||||||
|
await this.deviceStatusLogRepository.save(newLogs);
|
||||||
|
|
||||||
|
if (addDeviceStatusDto.productType === ProductType.PC) {
|
||||||
|
const energyCodes = new Set([
|
||||||
|
PowerClampEnergyEnum.ENERGY_CONSUMED,
|
||||||
|
PowerClampEnergyEnum.ENERGY_CONSUMED_A,
|
||||||
|
PowerClampEnergyEnum.ENERGY_CONSUMED_B,
|
||||||
|
PowerClampEnergyEnum.ENERGY_CONSUMED_C,
|
||||||
|
]);
|
||||||
|
|
||||||
|
const energyStatus = addDeviceStatusDto?.status?.find((status) => {
|
||||||
|
return energyCodes.has(status.code as PowerClampEnergyEnum);
|
||||||
|
});
|
||||||
|
|
||||||
|
if (energyStatus) {
|
||||||
|
await this.powerClampService.updateEnergyConsumedHistoricalData(
|
||||||
|
addDeviceStatusDto.deviceUuid,
|
||||||
|
);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if (
|
||||||
|
addDeviceStatusDto.productType === ProductType.CPS ||
|
||||||
|
addDeviceStatusDto.productType === ProductType.WPS
|
||||||
|
) {
|
||||||
|
const occupancyCodes = new Set([PresenceSensorEnum.PRESENCE_STATE]);
|
||||||
|
|
||||||
|
const occupancyStatus = addDeviceStatusDto?.status?.find((status) => {
|
||||||
|
return occupancyCodes.has(status.code as PresenceSensorEnum);
|
||||||
|
});
|
||||||
|
|
||||||
|
if (occupancyStatus) {
|
||||||
|
await this.occupancyService.updateOccupancySensorHistoricalData(
|
||||||
|
addDeviceStatusDto.deviceUuid,
|
||||||
|
);
|
||||||
|
await this.occupancyService.updateOccupancySensorHistoricalDurationData(
|
||||||
|
addDeviceStatusDto.deviceUuid,
|
||||||
|
);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if (addDeviceStatusDto.productType === ProductType.AQI) {
|
||||||
|
await this.aqiDataService.updateAQISensorHistoricalData(
|
||||||
|
addDeviceStatusDto.deviceUuid,
|
||||||
|
);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
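The development and production branches of `processDeviceStatusLogs` differ mainly in where the property list and timestamp live (`log.properties` and `property.time` in development, `status` and `property.t` in production). A hedged sketch of collapsing that difference into a single normaliser; the DTO type is simplified and the normalised shape is illustrative:

// Sketch only: one way to remove the duplicated dev/prod branching.
interface NormalizedProperty {
  code: string;
  value: unknown;
  eventTime: string; // ISO-8601
}

function normalizeProperties(
  dto: { status?: any; log?: any },
  isDevEnv: boolean,
): NormalizedProperty[] {
  const source = isDevEnv ? dto.log?.properties : dto.status;
  if (!Array.isArray(source)) {
    return [];
  }
  return source.map((p: any) => ({
    code: p.code,
    value: p.value,
    eventTime: new Date(isDevEnv ? p.time : p.t).toISOString(),
  }));
}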
@ -3,21 +3,32 @@ import { getDatabase } from 'firebase/database';
|
|||||||
import { ConfigService } from '@nestjs/config';
|
import { ConfigService } from '@nestjs/config';
|
||||||
|
|
||||||
export const initializeFirebaseApp = (configService: ConfigService) => {
|
export const initializeFirebaseApp = (configService: ConfigService) => {
|
||||||
const firebaseConfig = {
|
try {
|
||||||
apiKey: configService.get<string>('FIREBASE_API_KEY'),
|
const firebaseConfig = {
|
||||||
authDomain: configService.get<string>('FIREBASE_AUTH_DOMAIN'),
|
apiKey: configService.get<string>('FIREBASE_API_KEY'),
|
||||||
projectId: configService.get<string>('FIREBASE_PROJECT_ID'),
|
authDomain: configService.get<string>('FIREBASE_AUTH_DOMAIN'),
|
||||||
storageBucket: configService.get<string>('FIREBASE_STORAGE_BUCKET'),
|
projectId: configService.get<string>('FIREBASE_PROJECT_ID'),
|
||||||
messagingSenderId: configService.get<string>(
|
storageBucket: configService.get<string>('FIREBASE_STORAGE_BUCKET'),
|
||||||
'FIREBASE_MESSAGING_SENDER_ID',
|
messagingSenderId: configService.get<string>(
|
||||||
),
|
'FIREBASE_MESSAGING_SENDER_ID',
|
||||||
appId: configService.get<string>('FIREBASE_APP_ID'),
|
),
|
||||||
measurementId: configService.get<string>('FIREBASE_MEASUREMENT_ID'),
|
appId: configService.get<string>('FIREBASE_APP_ID'),
|
||||||
databaseURL: configService.get<string>('FIREBASE_DATABASE_URL'),
|
measurementId: configService.get<string>('FIREBASE_MEASUREMENT_ID'),
|
||||||
};
|
databaseURL: configService.get<string>('FIREBASE_DATABASE_URL'),
|
||||||
|
};
|
||||||
|
|
||||||
const app = initializeApp(firebaseConfig);
|
// Check if required Firebase config is available
|
||||||
return getDatabase(app);
|
if (!firebaseConfig.projectId || firebaseConfig.projectId === 'placeholder-project') {
|
||||||
|
console.warn('Firebase configuration not available, Firebase features will be disabled');
|
||||||
|
return null;
|
||||||
|
}
|
||||||
|
|
||||||
|
const app = initializeApp(firebaseConfig);
|
||||||
|
return getDatabase(app);
|
||||||
|
} catch (error) {
|
||||||
|
console.warn('Firebase initialization failed, Firebase features will be disabled:', error.message);
|
||||||
|
return null;
|
||||||
|
}
|
||||||
};
|
};
|
||||||
|
|
||||||
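Because `initializeFirebaseApp` can now return `null`, every consumer has to tolerate a missing database handle, as `createDeviceStatusFirebase` does above. A minimal hedged sketch of that guard around a write:

import { Database, ref, set } from 'firebase/database';

// Sketch only: skip the Firebase write gracefully when no database was initialised.
async function writeStatusIfAvailable(
  db: Database | null,
  deviceUuid: string,
  status: unknown,
): Promise<void> {
  if (!db) {
    console.warn('Firebase not available, skipping device-status write');
    return;
  }
  await set(ref(db, `device-status/${deviceUuid}`), status);
}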
export const firebaseDataBase = (configService: ConfigService) =>
|
export const firebaseDataBase = (configService: ConfigService) =>
|
||||||
|
@ -8,7 +8,10 @@ import { TuyaWebSocketService } from './services/tuya.web.socket.service';
|
|||||||
import { OneSignalService } from './services/onesignal.service';
|
import { OneSignalService } from './services/onesignal.service';
|
||||||
import { DeviceMessagesService } from './services/device.messages.service';
|
import { DeviceMessagesService } from './services/device.messages.service';
|
||||||
import { DeviceRepositoryModule } from '../modules/device/device.repository.module';
|
import { DeviceRepositoryModule } from '../modules/device/device.repository.module';
|
||||||
import { DeviceNotificationRepository } from '../modules/device/repositories';
|
import {
|
||||||
|
DeviceNotificationRepository,
|
||||||
|
DeviceRepository,
|
||||||
|
} from '../modules/device/repositories';
|
||||||
import { DeviceStatusFirebaseModule } from '../firebase/devices-status/devices-status.module';
|
import { DeviceStatusFirebaseModule } from '../firebase/devices-status/devices-status.module';
|
||||||
import { CommunityPermissionService } from './services/community.permission.service';
|
import { CommunityPermissionService } from './services/community.permission.service';
|
||||||
import { CommunityRepository } from '../modules/community/repositories';
|
import { CommunityRepository } from '../modules/community/repositories';
|
||||||
@ -27,6 +30,7 @@ import { SosHandlerService } from './services/sos.handler.service';
|
|||||||
DeviceNotificationRepository,
|
DeviceNotificationRepository,
|
||||||
CommunityRepository,
|
CommunityRepository,
|
||||||
SosHandlerService,
|
SosHandlerService,
|
||||||
|
DeviceRepository,
|
||||||
],
|
],
|
||||||
exports: [
|
exports: [
|
||||||
HelperHashService,
|
HelperHashService,
|
||||||
|
66
libs/common/src/helper/services/aqi.data.service.ts
Normal file
@ -0,0 +1,66 @@
|
|||||||
|
import { Injectable } from '@nestjs/common';
|
||||||
|
import { DataSource } from 'typeorm';
|
||||||
|
import { SQL_PROCEDURES_PATH } from '@app/common/constants/sql-query-path';
|
||||||
|
import { SqlLoaderService } from './sql-loader.service';
|
||||||
|
|
||||||
|
@Injectable()
|
||||||
|
export class AqiDataService {
|
||||||
|
constructor(
|
||||||
|
private readonly sqlLoader: SqlLoaderService,
|
||||||
|
private readonly dataSource: DataSource,
|
||||||
|
) {}
|
||||||
|
|
||||||
|
async updateAQISensorHistoricalData(): Promise<void> {
|
||||||
|
try {
|
||||||
|
const { dateStr } = this.getFormattedDates();
|
||||||
|
|
||||||
|
// Execute all procedures in parallel
|
||||||
|
await Promise.all([
|
||||||
|
this.executeProcedureWithRetry(
|
||||||
|
'proceduce_update_daily_space_aqi',
|
||||||
|
[dateStr],
|
||||||
|
'fact_daily_space_aqi',
|
||||||
|
),
|
||||||
|
]);
|
||||||
|
} catch (err) {
|
||||||
|
console.error('Failed to update AQI sensor historical data:', err);
|
||||||
|
throw err;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
private getFormattedDates(): { dateStr: string } {
|
||||||
|
const now = new Date();
|
||||||
|
return {
|
||||||
|
dateStr: now.toLocaleDateString('en-CA'), // YYYY-MM-DD
|
||||||
|
};
|
||||||
|
}
|
||||||
|
private async executeProcedureWithRetry(
|
||||||
|
procedureFileName: string,
|
||||||
|
params: (string | number | null)[],
|
||||||
|
folderName: string,
|
||||||
|
retries = 3,
|
||||||
|
): Promise<void> {
|
||||||
|
try {
|
||||||
|
const query = this.loadQuery(folderName, procedureFileName);
|
||||||
|
await this.dataSource.query(query, params);
|
||||||
|
console.log(`Procedure ${procedureFileName} executed successfully.`);
|
||||||
|
} catch (err) {
|
||||||
|
if (retries > 0) {
|
||||||
|
const delayMs = 1000 * (4 - retries); // Linearly growing backoff: 1s, 2s, 3s
|
||||||
|
console.warn(`Retrying ${procedureFileName} (${retries} retries left)`);
|
||||||
|
await new Promise((resolve) => setTimeout(resolve, delayMs));
|
||||||
|
return this.executeProcedureWithRetry(
|
||||||
|
procedureFileName,
|
||||||
|
params,
|
||||||
|
folderName,
|
||||||
|
retries - 1,
|
||||||
|
);
|
||||||
|
}
|
||||||
|
console.error(`Failed to execute ${procedureFileName}:`, err);
|
||||||
|
throw err;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
private loadQuery(folderName: string, fileName: string): string {
|
||||||
|
return this.sqlLoader.loadQuery(folderName, fileName, SQL_PROCEDURES_PATH);
|
||||||
|
}
|
||||||
|
}
|
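`executeProcedureWithRetry` retries a failed procedure up to three times, waiting 1000 * (4 - retries) ms between attempts, i.e. 1 s, then 2 s, then 3 s (a linearly growing delay rather than a truly exponential one). A generic hedged sketch of the same pattern:

// Sketch only: generic retry with the same linearly growing delay used above.
async function withRetry<T>(fn: () => Promise<T>, retries = 3): Promise<T> {
  try {
    return await fn();
  } catch (err) {
    if (retries <= 0) {
      throw err;
    }
    const delayMs = 1000 * (4 - retries); // 1s, 2s, 3s as retries run out
    await new Promise((resolve) => setTimeout(resolve, delayMs));
    return withRetry(fn, retries - 1);
  }
}

// Possible usage: await withRetry(() => dataSource.query(query, params));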
@ -1,65 +1,68 @@
|
|||||||
import { DeviceRepository } from '@app/common/modules/device/repositories';
|
|
||||||
import { Injectable } from '@nestjs/common';
|
import { Injectable } from '@nestjs/common';
|
||||||
import { SqlLoaderService } from './sql-loader.service';
|
|
||||||
import { DataSource } from 'typeorm';
|
import { DataSource } from 'typeorm';
|
||||||
import { SQL_PROCEDURES_PATH } from '@app/common/constants/sql-query-path';
|
import { SQL_PROCEDURES_PATH } from '@app/common/constants/sql-query-path';
|
||||||
|
import { SqlLoaderService } from './sql-loader.service';
|
||||||
|
|
||||||
@Injectable()
|
@Injectable()
|
||||||
export class OccupancyService {
|
export class OccupancyService {
|
||||||
constructor(
|
constructor(
|
||||||
private readonly sqlLoader: SqlLoaderService,
|
private readonly sqlLoader: SqlLoaderService,
|
||||||
private readonly dataSource: DataSource,
|
private readonly dataSource: DataSource,
|
||||||
private readonly deviceRepository: DeviceRepository,
|
|
||||||
) {}
|
) {}
|
||||||
async updateOccupancySensorHistoricalDurationData(
|
|
||||||
deviceUuid: string,
|
|
||||||
): Promise<void> {
|
|
||||||
try {
|
|
||||||
const now = new Date();
|
|
||||||
const dateStr = now.toLocaleDateString('en-CA'); // YYYY-MM-DD
|
|
||||||
const device = await this.deviceRepository.findOne({
|
|
||||||
where: { uuid: deviceUuid },
|
|
||||||
relations: ['spaceDevice'],
|
|
||||||
});
|
|
||||||
|
|
||||||
await this.executeProcedure(
|
async updateOccupancyDataProcedures(): Promise<void> {
|
||||||
'fact_daily_space_occupancy_duration',
|
try {
|
||||||
'procedure_update_daily_space_occupancy_duration',
|
const { dateStr } = this.getFormattedDates();
|
||||||
[dateStr, device.spaceDevice?.uuid],
|
|
||||||
);
|
// Execute all procedures in parallel
|
||||||
|
await Promise.all([
|
||||||
|
this.executeProcedureWithRetry(
|
||||||
|
'procedure_update_fact_space_occupancy',
|
||||||
|
[dateStr],
|
||||||
|
'fact_space_occupancy_count',
|
||||||
|
),
|
||||||
|
this.executeProcedureWithRetry(
|
||||||
|
'procedure_update_daily_space_occupancy_duration',
|
||||||
|
[dateStr],
|
||||||
|
'fact_daily_space_occupancy_duration',
|
||||||
|
),
|
||||||
|
]);
|
||||||
} catch (err) {
|
} catch (err) {
|
||||||
console.error('Failed to insert or update occupancy duration data:', err);
|
console.error('Failed to update occupancy data:', err);
|
||||||
throw err;
|
throw err;
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
async updateOccupancySensorHistoricalData(deviceUuid: string): Promise<void> {
|
private getFormattedDates(): { dateStr: string } {
|
||||||
try {
|
const now = new Date();
|
||||||
const now = new Date();
|
return {
|
||||||
const dateStr = now.toLocaleDateString('en-CA'); // YYYY-MM-DD
|
dateStr: now.toLocaleDateString('en-CA'), // YYYY-MM-DD
|
||||||
const device = await this.deviceRepository.findOne({
|
};
|
||||||
where: { uuid: deviceUuid },
|
|
||||||
relations: ['spaceDevice'],
|
|
||||||
});
|
|
||||||
|
|
||||||
await this.executeProcedure(
|
|
||||||
'fact_space_occupancy_count',
|
|
||||||
'procedure_update_fact_space_occupancy',
|
|
||||||
[dateStr, device.spaceDevice?.uuid],
|
|
||||||
);
|
|
||||||
} catch (err) {
|
|
||||||
console.error('Failed to insert or update occupancy data:', err);
|
|
||||||
throw err;
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
|
private async executeProcedureWithRetry(
|
||||||
private async executeProcedure(
|
|
||||||
procedureFolderName: string,
|
|
||||||
procedureFileName: string,
|
procedureFileName: string,
|
||||||
params: (string | number | null)[],
|
params: (string | number | null)[],
|
||||||
|
folderName: string,
|
||||||
|
retries = 3,
|
||||||
): Promise<void> {
|
): Promise<void> {
|
||||||
const query = this.loadQuery(procedureFolderName, procedureFileName);
|
try {
|
||||||
await this.dataSource.query(query, params);
|
const query = this.loadQuery(folderName, procedureFileName);
|
||||||
console.log(`Procedure ${procedureFileName} executed successfully.`);
|
await this.dataSource.query(query, params);
|
||||||
|
console.log(`Procedure ${procedureFileName} executed successfully.`);
|
||||||
|
} catch (err) {
|
||||||
|
if (retries > 0) {
|
||||||
|
const delayMs = 1000 * (4 - retries); // Linearly growing backoff: 1s, 2s, 3s
|
||||||
|
console.warn(`Retrying ${procedureFileName} (${retries} retries left)`);
|
||||||
|
await new Promise((resolve) => setTimeout(resolve, delayMs));
|
||||||
|
return this.executeProcedureWithRetry(
|
||||||
|
procedureFileName,
|
||||||
|
params,
|
||||||
|
folderName,
|
||||||
|
retries - 1,
|
||||||
|
);
|
||||||
|
}
|
||||||
|
console.error(`Failed to execute ${procedureFileName}:`, err);
|
||||||
|
throw err;
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
private loadQuery(folderName: string, fileName: string): string {
|
private loadQuery(folderName: string, fileName: string): string {
|
||||||
|
@@ -1,7 +1,7 @@
 import { Injectable } from '@nestjs/common';
-import { SqlLoaderService } from './sql-loader.service';
 import { DataSource } from 'typeorm';
 import { SQL_PROCEDURES_PATH } from '@app/common/constants/sql-query-path';
+import { SqlLoaderService } from './sql-loader.service';

 @Injectable()
 export class PowerClampService {
@@ -10,48 +10,72 @@ export class PowerClampService {
     private readonly dataSource: DataSource,
   ) {}

-  async updateEnergyConsumedHistoricalData(deviceUuid: string): Promise<void> {
+  async updateEnergyConsumedHistoricalData(): Promise<void> {
     try {
-      const now = new Date();
-      const dateStr = now.toLocaleDateString('en-CA'); // YYYY-MM-DD
-      const hour = now.getHours();
-      const monthYear = now
-        .toLocaleDateString('en-US', {
-          month: '2-digit',
-          year: 'numeric',
-        })
-        .replace('/', '-'); // MM-YYYY
-
-      await this.executeProcedure(
-        'fact_hourly_device_energy_consumed_procedure',
-        [deviceUuid, dateStr, hour],
-      );
-
-      await this.executeProcedure(
-        'fact_daily_device_energy_consumed_procedure',
-        [deviceUuid, dateStr],
-      );
-
-      await this.executeProcedure(
-        'fact_monthly_device_energy_consumed_procedure',
-        [deviceUuid, monthYear],
-      );
+      const { dateStr, monthYear } = this.getFormattedDates();
+
+      // Execute all procedures in parallel
+      await Promise.all([
+        this.executeProcedureWithRetry(
+          'fact_hourly_device_energy_consumed_procedure',
+          [dateStr],
+          'fact_device_energy_consumed',
+        ),
+        this.executeProcedureWithRetry(
+          'fact_daily_device_energy_consumed_procedure',
+          [dateStr],
+          'fact_device_energy_consumed',
+        ),
+        this.executeProcedureWithRetry(
+          'fact_monthly_device_energy_consumed_procedure',
+          [monthYear],
+          'fact_device_energy_consumed',
+        ),
+      ]);
     } catch (err) {
-      console.error('Failed to insert or update energy data:', err);
+      console.error('Failed to update energy consumption data:', err);
       throw err;
     }
   }

-  private async executeProcedure(
+  private getFormattedDates(): { dateStr: string; monthYear: string } {
+    const now = new Date();
+    return {
+      dateStr: now.toLocaleDateString('en-CA'), // YYYY-MM-DD
+      monthYear: now
+        .toLocaleDateString('en-US', {
+          month: '2-digit',
+          year: 'numeric',
+        })
+        .replace('/', '-'), // MM-YYYY
+    };
+  }
+
+  private async executeProcedureWithRetry(
     procedureFileName: string,
     params: (string | number | null)[],
+    folderName: string,
+    retries = 3,
   ): Promise<void> {
-    const query = this.loadQuery(
-      'fact_device_energy_consumed',
-      procedureFileName,
-    );
-    await this.dataSource.query(query, params);
-    console.log(`Procedure ${procedureFileName} executed successfully.`);
+    try {
+      const query = this.loadQuery(folderName, procedureFileName);
+      await this.dataSource.query(query, params);
+      console.log(`Procedure ${procedureFileName} executed successfully.`);
+    } catch (err) {
+      if (retries > 0) {
+        const delayMs = 1000 * (4 - retries); // Exponential backoff
+        console.warn(`Retrying ${procedureFileName} (${retries} retries left)`);
+        await new Promise((resolve) => setTimeout(resolve, delayMs));
+        return this.executeProcedureWithRetry(
+          procedureFileName,
+          params,
+          folderName,
+          retries - 1,
+        );
+      }
+      console.error(`Failed to execute ${procedureFileName}:`, err);
+      throw err;
+    }
   }

   private loadQuery(folderName: string, fileName: string): string {
@@ -16,21 +16,46 @@ export class SosHandlerService {
     );
   }

-  async handleSosEvent(devId: string, logData: any): Promise<void> {
+  async handleSosEventFirebase(device: any, logData: any): Promise<void> {
+    const sosTrueStatus = [{ code: 'sos', value: true }];
+    const sosFalseStatus = [{ code: 'sos', value: false }];
+
     try {
+      // ✅ Send true status
       await this.deviceStatusFirebaseService.addDeviceStatusToFirebase({
-        deviceTuyaUuid: devId,
-        status: [{ code: 'sos', value: true }],
+        deviceTuyaUuid: device.deviceTuyaUuid,
+        status: sosTrueStatus,
         log: logData,
+        device,
       });

+      await this.deviceStatusFirebaseService.addBatchDeviceStatusToOurDb([
+        {
+          deviceTuyaUuid: device.deviceTuyaUuid,
+          status: sosTrueStatus,
+          log: logData,
+          device,
+        },
+      ]);
+
+      // ✅ Schedule false status
       setTimeout(async () => {
         try {
           await this.deviceStatusFirebaseService.addDeviceStatusToFirebase({
-            deviceTuyaUuid: devId,
-            status: [{ code: 'sos', value: false }],
+            deviceTuyaUuid: device.deviceTuyaUuid,
+            status: sosFalseStatus,
             log: logData,
+            device,
           });
+
+          await this.deviceStatusFirebaseService.addBatchDeviceStatusToOurDb([
+            {
+              deviceTuyaUuid: device.deviceTuyaUuid,
+              status: sosFalseStatus,
+              log: logData,
+              device,
+            },
+          ]);
         } catch (err) {
           this.logger.error('Failed to send SOS false value', err);
         }
@@ -1,13 +1,24 @@
-import { Injectable } from '@nestjs/common';
-import TuyaWebsocket from '../../config/tuya-web-socket-config';
-import { ConfigService } from '@nestjs/config';
 import { DeviceStatusFirebaseService } from '@app/common/firebase/devices-status/services/devices-status.service';
+import { Injectable, OnModuleInit } from '@nestjs/common';
+import { ConfigService } from '@nestjs/config';
+import * as NodeCache from 'node-cache';
+import TuyaWebsocket from '../../config/tuya-web-socket-config';
 import { SosHandlerService } from './sos.handler.service';

 @Injectable()
-export class TuyaWebSocketService {
+export class TuyaWebSocketService implements OnModuleInit {
   private client: any;
   private readonly isDevEnv: boolean;
+  private readonly deviceCache = new NodeCache({ stdTTL: 7200 }); // TTL = 2 hour
+
+  private messageQueue: {
+    devId: string;
+    status: any;
+    logData: any;
+    device: any;
+  }[] = [];
+
+  private isProcessing = false;

   constructor(
     private readonly configService: ConfigService,
@@ -26,16 +37,36 @@ export class TuyaWebSocketService {
     });

     if (this.configService.get<string>('tuya-config.TRUN_ON_TUYA_SOCKET')) {
-      // Set up event handlers
       this.setupEventHandlers();
-
-      // Start receiving messages
       this.client.start();
     }
+
+    // Run the queue processor every 15 seconds
+    setInterval(() => this.processQueue(), 15000);
+
+    // Refresh the cache every 1 hour
+    setInterval(() => this.initializeDeviceCache(), 30 * 60 * 1000); // 30 minutes
+  }
+
+  async onModuleInit() {
+    await this.initializeDeviceCache();
+  }
+
+  private async initializeDeviceCache() {
+    try {
+      const allDevices = await this.deviceStatusFirebaseService.getAllDevices();
+      allDevices.forEach((device) => {
+        if (device.deviceTuyaUuid) {
+          this.deviceCache.set(device.deviceTuyaUuid, device);
+        }
+      });
+      console.log(`✅ Refreshed cache with ${allDevices.length} devices.`);
+    } catch (error) {
+      console.error('❌ Failed to initialize device cache:', error);
+    }
   }

   private setupEventHandlers() {
-    // Event handlers
     this.client.open(() => {
       console.log('open');
     });
@@ -43,23 +74,43 @@ export class TuyaWebSocketService {
     this.client.message(async (ws: WebSocket, message: any) => {
       try {
         const { devId, status, logData } = this.extractMessageData(message);
+        // console.log(
+        //   `📬 Received message for device: ${devId}, status:`,
+        //   status,
+        //   logData,
+        // );
+        if (!Array.isArray(status)) {
+          this.client.ackMessage(message.messageId);
+          return;
+        }
+
+        const device = this.deviceCache.get(devId);
+        if (!device) {
+          // console.log(⛔ Unknown device: ${devId}, message ignored.);
+          this.client.ackMessage(message.messageId);
+          return;
+        }
+
         if (this.sosHandlerService.isSosTriggered(status)) {
-          await this.sosHandlerService.handleSosEvent(devId, logData);
+          await this.sosHandlerService.handleSosEventFirebase(devId, logData);
         } else {
           await this.deviceStatusFirebaseService.addDeviceStatusToFirebase({
             deviceTuyaUuid: devId,
-            status: status,
+            status,
             log: logData,
+            device,
           });
         }

+        // Push to internal queue
+        this.messageQueue.push({ devId, status, logData, device });
+
+        // Acknowledge the message
         this.client.ackMessage(message.messageId);
       } catch (error) {
-        console.error('Error processing message:', error);
+        console.error('❌ Error receiving message:', error);
       }
     });

     this.client.reconnect(() => {
       console.log('reconnect');
     });
@@ -80,11 +131,44 @@ export class TuyaWebSocketService {
       console.error('WebSocket error:', error);
     });
   }
+
+  private async processQueue() {
+    if (this.isProcessing) {
+      console.log('⏳ Skipping: still processing previous batch');
+      return;
+    }
+
+    if (this.messageQueue.length === 0) return;
+
+    this.isProcessing = true;
+    const batch = [...this.messageQueue];
+    this.messageQueue = [];
+
+    console.log(`🔁 Processing batch of size: ${batch.length}`);
+
+    try {
+      await this.deviceStatusFirebaseService.addBatchDeviceStatusToOurDb(
+        batch.map((item) => ({
+          deviceTuyaUuid: item.devId,
+          status: item.status,
+          log: item.logData,
+          device: item.device,
+        })),
+      );
+    } catch (error) {
+      console.error('❌ Error processing batch:', error);
+      this.messageQueue.unshift(...batch); // retry
+    } finally {
+      this.isProcessing = false;
+    }
+  }

   private extractMessageData(message: any): {
     devId: string;
     status: any;
     logData: any;
   } {
+    // console.log('Received message:', message);
+
     const payloadData = message.payload.data;

     if (this.isDevEnv) {
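Note on the queue added above: WebSocket ingestion is decoupled from database writes — messages are pushed onto `messageQueue` as they arrive and flushed in one batch every 15 seconds, with `isProcessing` acting as a simple re-entrancy guard and `unshift` putting a failed batch back for the next tick. Also worth flagging: the comment says the device cache is refreshed every 1 hour while the interval is 30 * 60 * 1000 ms (30 minutes). A minimal standalone sketch of the batching pattern, assuming a hypothetical `flush` function in place of `addBatchDeviceStatusToOurDb`:

// Sketch only: interval-driven batch flush with a re-entrancy guard.
type QueuedStatus = { devId: string; status: unknown; logData: unknown };

let queue: QueuedStatus[] = [];
let busy = false;

// `flush` is a hypothetical stand-in for the batch DB write.
async function drain(flush: (batch: QueuedStatus[]) => Promise<void>) {
  if (busy || queue.length === 0) return; // skip if a batch is in flight
  busy = true;
  const batch = queue;
  queue = [];
  try {
    await flush(batch);
  } catch {
    queue.unshift(...batch); // put the batch back and retry next tick
  } finally {
    busy = false;
  }
}

// setInterval(() => drain(myFlush), 15_000);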
libs/common/src/helper/timeToMinutes.ts (new file, 5 lines)
@@ -0,0 +1,5 @@
+// Convert time string (HH:mm) to minutes
+export function timeToMinutes(time: string): number {
+  const [hours, minutes] = time.split(':').map(Number);
+  return hours * 60 + minutes;
+}
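A quick usage sketch for the helper above; the `@app/common/...` import alias follows the repo's existing convention and is assumed here, and the times are purely illustrative:

import { timeToMinutes } from '@app/common/helper/timeToMinutes';

// timeToMinutes('09:30') === 570, timeToMinutes('00:05') === 5
const opensAt = timeToMinutes('09:30');
const closesAt = timeToMinutes('17:00');
const isWithinHours = (t: string) =>
  timeToMinutes(t) >= opensAt && timeToMinutes(t) <= closesAt;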
@@ -49,12 +49,12 @@ export class TuyaService {
       path,
     });

-    if (!response.success) {
-      throw new HttpException(
-        `Error fetching device details: ${response.msg}`,
-        HttpStatus.BAD_REQUEST,
-      );
-    }
+    // if (!response.success) {
+    //   throw new HttpException(
+    //     `Error fetching device details: ${response.msg}`,
+    //     HttpStatus.BAD_REQUEST,
+    //   );
+    // }

     return response.result;
   }
@@ -1,43 +1,26 @@
 import { utilities as nestWinstonModuleUtilities } from 'nest-winston';
 import * as winston from 'winston';
-const environment = process.env.NODE_ENV || 'local';

 export const winstonLoggerOptions: winston.LoggerOptions = {
   level:
-    environment === 'local'
-      ? 'debug'
-      : environment === 'development'
-        ? 'warn'
-        : 'error',
+    process.env.AZURE_POSTGRESQL_DATABASE === 'development' ? 'debug' : 'error',
   transports: [
     new winston.transports.Console({
-      level:
-        environment === 'local'
-          ? 'debug'
-          : environment === 'development'
-            ? 'warn'
-            : 'error',
       format: winston.format.combine(
         winston.format.timestamp(),
         nestWinstonModuleUtilities.format.nestLike('MyApp', {
-          prettyPrint: environment === 'local',
+          prettyPrint: true,
         }),
       ),
     }),
-    // Only create file logs if NOT local
-    ...(environment !== 'local'
-      ? [
-          new winston.transports.File({
-            filename: 'logs/error.log',
-            level: 'error',
-            format: winston.format.json(),
-          }),
-          new winston.transports.File({
-            filename: 'logs/combined.log',
-            level: 'info',
-            format: winston.format.json(),
-          }),
-        ]
-      : []),
+    new winston.transports.File({
+      filename: 'logs/error.log',
+      level: 'error',
+      format: winston.format.json(),
+    }),
+    new winston.transports.File({
+      filename: 'logs/combined.log',
+      format: winston.format.json(),
+    }),
   ],
 };
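For context, a hedged sketch of how options like these are typically wired into a NestJS bootstrap with nest-winston; the config file path and root module name are assumptions, not taken from the diff:

import { NestFactory } from '@nestjs/core';
import { WinstonModule } from 'nest-winston';
import { AppModule } from './app.module'; // placeholder root module
import { winstonLoggerOptions } from './logger.config'; // path assumed

async function bootstrap() {
  // Replace Nest's default logger with the winston-backed one.
  const app = await NestFactory.create(AppModule, {
    logger: WinstonModule.createLogger(winstonLoggerOptions),
  });
  await app.listen(3000);
}
bootstrap();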
@@ -1,6 +1,6 @@
 import { RoleType } from '@app/common/constants/role.type.enum';
 import { UserStatusEnum } from '@app/common/constants/user-status.enum';
-import { IsEnum, IsNotEmpty, IsString } from 'class-validator';
+import { IsEnum, IsNotEmpty, IsOptional, IsString } from 'class-validator';

 export class InviteUserDto {
   @IsString()
@@ -12,8 +12,12 @@ export class InviteUserDto {
   public email: string;

   @IsString()
-  @IsNotEmpty()
-  public jobTitle: string;
+  @IsOptional()
+  public jobTitle?: string;
+
+  @IsString()
+  @IsOptional()
+  public companyName?: string;

   @IsEnum(UserStatusEnum)
   @IsNotEmpty()
@@ -8,14 +8,14 @@ import {
   Unique,
 } from 'typeorm';

-import { AbstractEntity } from '../../abstract/entities/abstract.entity';
-import { RoleTypeEntity } from '../../role-type/entities';
-import { UserStatusEnum } from '@app/common/constants/user-status.enum';
-import { UserEntity } from '../../user/entities';
 import { RoleType } from '@app/common/constants/role.type.enum';
-import { InviteUserDto, InviteUserSpaceDto } from '../dtos';
+import { UserStatusEnum } from '@app/common/constants/user-status.enum';
+import { AbstractEntity } from '../../abstract/entities/abstract.entity';
 import { ProjectEntity } from '../../project/entities';
+import { RoleTypeEntity } from '../../role-type/entities';
 import { SpaceEntity } from '../../space/entities/space.entity';
+import { UserEntity } from '../../user/entities';
+import { InviteUserDto, InviteUserSpaceDto } from '../dtos';

 @Entity({ name: 'invite-user' })
 @Unique(['email', 'project'])
@@ -37,6 +37,11 @@ export class InviteUserEntity extends AbstractEntity<InviteUserDto> {
   })
   jobTitle: string;

+  @Column({
+    nullable: true,
+  })
+  companyName: string;
+
   @Column({
     nullable: false,
     enum: Object.values(UserStatusEnum),
@@ -82,7 +87,10 @@ export class InviteUserEntity extends AbstractEntity<InviteUserDto> {
     onDelete: 'CASCADE',
   })
   public roleType: RoleTypeEntity;
-  @OneToOne(() => UserEntity, (user) => user.inviteUser, { nullable: true })
+  @OneToOne(() => UserEntity, (user) => user.inviteUser, {
+    nullable: true,
+    onDelete: 'CASCADE',
+  })
   @JoinColumn({ name: 'user_uuid' })
   user: UserEntity;
   @OneToMany(
@@ -112,7 +120,9 @@ export class InviteUserSpaceEntity extends AbstractEntity<InviteUserSpaceDto> {
   })
   public uuid: string;

-  @ManyToOne(() => InviteUserEntity, (inviteUser) => inviteUser.spaces)
+  @ManyToOne(() => InviteUserEntity, (inviteUser) => inviteUser.spaces, {
+    onDelete: 'CASCADE',
+  })
   @JoinColumn({ name: 'invite_user_uuid' })
   public inviteUser: InviteUserEntity;
libs/common/src/modules/aqi/aqi.repository.module.ts (new file, 11 lines)
@@ -0,0 +1,11 @@
+import { Module } from '@nestjs/common';
+import { TypeOrmModule } from '@nestjs/typeorm';
+import { AqiSpaceDailyPollutantStatsEntity } from './entities/aqi.entity';
+
+@Module({
+  providers: [],
+  exports: [],
+  controllers: [],
+  imports: [TypeOrmModule.forFeature([AqiSpaceDailyPollutantStatsEntity])],
+})
+export class AqiRepositoryModule {}
libs/common/src/modules/aqi/dtos/aqi.dto.ts (new file, 82 lines)
@@ -0,0 +1,82 @@
+import { IsNotEmpty, IsNumber, IsString } from 'class-validator';
+
+export class AqiSpaceDailyPollutantStatsDto {
+  @IsString()
+  @IsNotEmpty()
+  public uuid: string;
+
+  @IsNotEmpty()
+  @IsString()
+  spaceUuid: string;
+
+  @IsNotEmpty()
+  @IsString()
+  eventDay: string;
+
+  @IsNotEmpty()
+  @IsNumber()
+  eventHour: number;
+
+  @IsNumber() pm1Min: number;
+  @IsNumber() pm1Avg: number;
+  @IsNumber() pm1Max: number;
+
+  @IsNumber() pm10Min: number;
+  @IsNumber() pm10Avg: number;
+  @IsNumber() pm10Max: number;
+
+  @IsNumber() pm25Min: number;
+  @IsNumber() pm25Avg: number;
+  @IsNumber() pm25Max: number;
+
+  @IsNumber() ch2oMin: number;
+  @IsNumber() ch2oAvg: number;
+  @IsNumber() ch2oMax: number;
+
+  @IsNumber() vocMin: number;
+  @IsNumber() vocAvg: number;
+  @IsNumber() vocMax: number;
+
+  @IsNumber() co2Min: number;
+  @IsNumber() co2Avg: number;
+  @IsNumber() co2Max: number;
+
+  @IsNumber() aqiMin: number;
+  @IsNumber() aqiAvg: number;
+  @IsNumber() aqiMax: number;
+}
libs/common/src/modules/aqi/dtos/index.ts (new file, 1 line)
@@ -0,0 +1 @@
+export * from './aqi.dto';
libs/common/src/modules/aqi/entities/aqi.entity.ts (new file, 184 lines)
@@ -0,0 +1,184 @@
+import { Column, Entity, ManyToOne, Unique } from 'typeorm';
+import { AbstractEntity } from '../../abstract/entities/abstract.entity';
+import { SpaceEntity } from '../../space/entities/space.entity';
+import { AqiSpaceDailyPollutantStatsDto } from '../dtos';
+
+@Entity({ name: 'space-daily-pollutant-stats' })
+@Unique(['spaceUuid', 'eventDate'])
+export class AqiSpaceDailyPollutantStatsEntity extends AbstractEntity<AqiSpaceDailyPollutantStatsDto> {
+  @Column({ nullable: false })
+  public spaceUuid: string;
+
+  @ManyToOne(() => SpaceEntity, (space) => space.aqiSensorDaily)
+  space: SpaceEntity;
+
+  @Column({ type: 'date', nullable: false })
+  public eventDate: Date;
+
+  @Column('float', { nullable: true }) public goodAqiPercentage?: number;
+  @Column('float', { nullable: true }) public moderateAqiPercentage?: number;
+  @Column('float', { nullable: true }) public unhealthySensitiveAqiPercentage?: number;
+  @Column('float', { nullable: true }) public unhealthyAqiPercentage?: number;
+  @Column('float', { nullable: true }) public veryUnhealthyAqiPercentage?: number;
+  @Column('float', { nullable: true }) public hazardousAqiPercentage?: number;
+  @Column('float', { nullable: true }) public dailyAvgAqi?: number;
+  @Column('float', { nullable: true }) public dailyMaxAqi?: number;
+  @Column('float', { nullable: true }) public dailyMinAqi?: number;
+
+  @Column('float', { nullable: true }) public goodPm25Percentage?: number;
+  @Column('float', { nullable: true }) public moderatePm25Percentage?: number;
+  @Column('float', { nullable: true }) public unhealthySensitivePm25Percentage?: number;
+  @Column('float', { nullable: true }) public unhealthyPm25Percentage?: number;
+  @Column('float', { nullable: true }) public veryUnhealthyPm25Percentage?: number;
+  @Column('float', { nullable: true }) public hazardousPm25Percentage?: number;
+  @Column('float', { nullable: true }) public dailyAvgPm25?: number;
+  @Column('float', { nullable: true }) public dailyMaxPm25?: number;
+  @Column('float', { nullable: true }) public dailyMinPm25?: number;
+
+  @Column('float', { nullable: true }) public goodPm10Percentage?: number;
+  @Column('float', { nullable: true }) public moderatePm10Percentage?: number;
+  @Column('float', { nullable: true }) public unhealthySensitivePm10Percentage?: number;
+  @Column('float', { nullable: true }) public unhealthyPm10Percentage?: number;
+  @Column('float', { nullable: true }) public veryUnhealthyPm10Percentage?: number;
+  @Column('float', { nullable: true }) public hazardousPm10Percentage?: number;
+  @Column('float', { nullable: true }) public dailyAvgPm10?: number;
+  @Column('float', { nullable: true }) public dailyMaxPm10?: number;
+  @Column('float', { nullable: true }) public dailyMinPm10?: number;
+
+  @Column('float', { nullable: true }) public goodVocPercentage?: number;
+  @Column('float', { nullable: true }) public moderateVocPercentage?: number;
+  @Column('float', { nullable: true }) public unhealthySensitiveVocPercentage?: number;
+  @Column('float', { nullable: true }) public unhealthyVocPercentage?: number;
+  @Column('float', { nullable: true }) public veryUnhealthyVocPercentage?: number;
+  @Column('float', { nullable: true }) public hazardousVocPercentage?: number;
+  @Column('float', { nullable: true }) public dailyAvgVoc?: number;
+  @Column('float', { nullable: true }) public dailyMaxVoc?: number;
+  @Column('float', { nullable: true }) public dailyMinVoc?: number;
+
+  @Column('float', { nullable: true }) public goodCo2Percentage?: number;
+  @Column('float', { nullable: true }) public moderateCo2Percentage?: number;
+  @Column('float', { nullable: true }) public unhealthySensitiveCo2Percentage?: number;
+  @Column('float', { nullable: true }) public unhealthyCo2Percentage?: number;
+  @Column('float', { nullable: true }) public veryUnhealthyCo2Percentage?: number;
+  @Column('float', { nullable: true }) public hazardousCo2Percentage?: number;
+  @Column('float', { nullable: true }) public dailyAvgCo2?: number;
+  @Column('float', { nullable: true }) public dailyMaxCo2?: number;
+  @Column('float', { nullable: true }) public dailyMinCo2?: number;
+
+  @Column('float', { nullable: true }) public goodCh2oPercentage?: number;
+  @Column('float', { nullable: true }) public moderateCh2oPercentage?: number;
+  @Column('float', { nullable: true }) public unhealthySensitiveCh2oPercentage?: number;
+  @Column('float', { nullable: true }) public unhealthyCh2oPercentage?: number;
+  @Column('float', { nullable: true }) public veryUnhealthyCh2oPercentage?: number;
+  @Column('float', { nullable: true }) public hazardousCh2oPercentage?: number;
+  @Column('float', { nullable: true }) public dailyAvgCh2o?: number;
+  @Column('float', { nullable: true }) public dailyMaxCh2o?: number;
+  @Column('float', { nullable: true }) public dailyMinCh2o?: number;
+
+  constructor(partial: Partial<AqiSpaceDailyPollutantStatsEntity>) {
+    super();
+    Object.assign(this, partial);
+  }
+}
libs/common/src/modules/aqi/entities/index.ts (new file, 1 line)
@@ -0,0 +1 @@
+export * from './aqi.entity';
libs/common/src/modules/aqi/repositories/aqi.repository.ts (new file, 10 lines)
@@ -0,0 +1,10 @@
+import { DataSource, Repository } from 'typeorm';
+import { Injectable } from '@nestjs/common';
+import { AqiSpaceDailyPollutantStatsEntity } from '../entities';
+
+@Injectable()
+export class AqiSpaceDailyPollutantStatsRepository extends Repository<AqiSpaceDailyPollutantStatsEntity> {
+  constructor(private dataSource: DataSource) {
+    super(AqiSpaceDailyPollutantStatsEntity, dataSource.createEntityManager());
+  }
+}
libs/common/src/modules/aqi/repositories/index.ts (new file, 1 line)
@@ -0,0 +1 @@
+export * from './aqi.repository';
libs/common/src/modules/booking/booking.repository.module.ts (new file, 12 lines)
@@ -0,0 +1,12 @@
+import { Module } from '@nestjs/common';
+import { TypeOrmModule } from '@nestjs/typeorm';
+import { BookableSpaceEntity } from './entities/bookable-space.entity';
+import { BookingEntity } from './entities/booking.entity';
+
+@Module({
+  providers: [],
+  exports: [],
+  controllers: [],
+  imports: [TypeOrmModule.forFeature([BookableSpaceEntity, BookingEntity])],
+})
+export class BookingRepositoryModule {}
(new file, 51 lines)
@@ -0,0 +1,51 @@
+import { DaysEnum } from '@app/common/constants/days.enum';
+import {
+  Column,
+  CreateDateColumn,
+  Entity,
+  JoinColumn,
+  OneToOne,
+  UpdateDateColumn,
+} from 'typeorm';
+import { AbstractEntity } from '../../abstract/entities/abstract.entity';
+import { SpaceEntity } from '../../space/entities/space.entity';
+
+@Entity('bookable-space')
+export class BookableSpaceEntity extends AbstractEntity {
+  @Column({
+    type: 'uuid',
+    default: () => 'gen_random_uuid()',
+    nullable: false,
+  })
+  public uuid: string;
+
+  @OneToOne(() => SpaceEntity, (space) => space.bookableConfig)
+  @JoinColumn({ name: 'space_uuid' })
+  space: SpaceEntity;
+
+  @Column({
+    type: 'enum',
+    enum: DaysEnum,
+    array: true,
+    nullable: false,
+  })
+  daysAvailable: DaysEnum[];
+
+  @Column({ type: 'time' })
+  startTime: string;
+
+  @Column({ type: 'time' })
+  endTime: string;
+
+  @Column({ type: Boolean, default: true })
+  active: boolean;
+
+  @Column({ type: 'int', default: null })
+  points?: number;
+
+  @CreateDateColumn()
+  createdAt: Date;
+
+  @UpdateDateColumn()
+  updatedAt: Date;
+}
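One way the pieces above could plausibly fit together: the `startTime`/`endTime` columns are `time` strings, so the `timeToMinutes` helper introduced earlier is a natural fit for checking whether a requested slot falls inside a bookable window. A hedged sketch, not taken from the diff — the function and parameter names are illustrative only:

import { timeToMinutes } from '@app/common/helper/timeToMinutes'; // alias assumed

// Returns true when [slotStart, slotEnd] lies inside the configured window.
function slotFitsWindow(
  windowStart: string, // e.g. BookableSpaceEntity.startTime, 'HH:mm'
  windowEnd: string,   // e.g. BookableSpaceEntity.endTime, 'HH:mm'
  slotStart: string,
  slotEnd: string,
): boolean {
  const ws = timeToMinutes(windowStart);
  const we = timeToMinutes(windowEnd);
  const ss = timeToMinutes(slotStart);
  const se = timeToMinutes(slotEnd);
  return ss >= ws && se <= we && ss < se;
}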
libs/common/src/modules/booking/entities/booking.entity.ts (new file, 44 lines)
@@ -0,0 +1,44 @@
+import {
+  Column,
+  CreateDateColumn,
+  Entity,
+  ManyToOne,
+  UpdateDateColumn,
+} from 'typeorm';
+import { AbstractEntity } from '../../abstract/entities/abstract.entity';
+import { SpaceEntity } from '../../space/entities/space.entity';
+import { UserEntity } from '../../user/entities';
+
+@Entity('booking')
+export class BookingEntity extends AbstractEntity {
+  @Column({
+    type: 'uuid',
+    default: () => 'gen_random_uuid()',
+    nullable: false,
+  })
+  public uuid: string;
+
+  @ManyToOne(() => SpaceEntity, (space) => space.bookableConfig)
+  space: SpaceEntity;
+
+  @ManyToOne(() => UserEntity, (user) => user.bookings)
+  user: UserEntity;
+
+  @Column({ type: Date, nullable: false })
+  date: Date;
+
+  @Column({ type: 'time' })
+  startTime: string;
+
+  @Column({ type: 'time' })
+  endTime: string;
+
+  @Column({ type: 'int', default: null })
+  cost?: number;
+
+  @CreateDateColumn()
+  createdAt: Date;
+
+  @UpdateDateColumn()
+  updatedAt: Date;
+}
(new file, 10 lines)
@@ -0,0 +1,10 @@
+import { DataSource, Repository } from 'typeorm';
+import { Injectable } from '@nestjs/common';
+import { BookableSpaceEntity } from '../entities/bookable-space.entity';
+
+@Injectable()
+export class BookableSpaceEntityRepository extends Repository<BookableSpaceEntity> {
+  constructor(private dataSource: DataSource) {
+    super(BookableSpaceEntity, dataSource.createEntityManager());
+  }
+}
(new file, 10 lines)
@@ -0,0 +1,10 @@
+import { Injectable } from '@nestjs/common';
+import { DataSource, Repository } from 'typeorm';
+import { BookingEntity } from '../entities/booking.entity';
+
+@Injectable()
+export class BookingEntityRepository extends Repository<BookingEntity> {
+  constructor(private dataSource: DataSource) {
+    super(BookingEntity, dataSource.createEntityManager());
+  }
+}
@@ -2,15 +2,15 @@ import { SourceType } from '@app/common/constants/source-type.enum';
 import { Entity, Column, PrimaryColumn, Unique } from 'typeorm';

 @Entity('device-status-log')
-@Unique('event_time_idx', ['eventTime'])
+@Unique('event_time_idx', ['eventTime', 'deviceId', 'code', 'value'])
 export class DeviceStatusLogEntity {
-  @Column({ type: 'int', generated: true, unsigned: true })
+  @PrimaryColumn({ type: 'int', generated: true, unsigned: true })
   id: number;

   @Column({ type: 'text' })
   eventId: string;

-  @PrimaryColumn({ type: 'timestamptz' })
+  @Column({ type: 'timestamptz' })
   eventTime: Date;

   @Column({
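The widened unique constraint above (event time plus device, code, and value) is what makes idempotent bulk inserts of status logs possible: duplicate events collide on the constraint instead of creating extra rows. A hedged sketch of how such a batch write could be expressed with TypeORM's query builder — the entity import path is assumed and the function name is illustrative:

import { Repository } from 'typeorm';
import { DeviceStatusLogEntity } from './device-status-log.entity'; // path assumed

// Insert a batch of log rows, silently skipping rows that violate the
// (eventTime, deviceId, code, value) unique constraint.
async function insertStatusLogs(
  repo: Repository<DeviceStatusLogEntity>,
  rows: Partial<DeviceStatusLogEntity>[],
): Promise<void> {
  await repo
    .createQueryBuilder()
    .insert()
    .into(DeviceStatusLogEntity)
    .values(rows)
    .orIgnore() // ON CONFLICT DO NOTHING on Postgres
    .execute();
}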
@@ -1,24 +1,24 @@
 import {
   Column,
   Entity,
+  Index,
+  JoinColumn,
   ManyToOne,
   OneToMany,
   Unique,
-  Index,
-  JoinColumn,
 } from 'typeorm';
 import { AbstractEntity } from '../../abstract/entities/abstract.entity';
-import { DeviceDto, DeviceUserPermissionDto } from '../dtos/device.dto';
-import { ProductEntity } from '../../product/entities';
-import { UserEntity } from '../../user/entities';
-import { DeviceNotificationDto } from '../dtos';
 import { PermissionTypeEntity } from '../../permission/entities';
+import { PowerClampHourlyEntity } from '../../power-clamp/entities/power-clamp.entity';
+import { PresenceSensorDailyDeviceEntity } from '../../presence-sensor/entities';
+import { ProductEntity } from '../../product/entities';
 import { SceneDeviceEntity } from '../../scene-device/entities';
 import { SpaceEntity } from '../../space/entities/space.entity';
 import { SubspaceEntity } from '../../space/entities/subspace/subspace.entity';
 import { NewTagEntity } from '../../tag';
-import { PowerClampHourlyEntity } from '../../power-clamp/entities/power-clamp.entity';
-import { PresenceSensorDailyDeviceEntity } from '../../presence-sensor/entities';
+import { UserEntity } from '../../user/entities';
+import { DeviceNotificationDto } from '../dtos';
+import { DeviceDto, DeviceUserPermissionDto } from '../dtos/device.dto';

 @Entity({ name: 'device' })
 @Unique(['deviceTuyaUuid'])
@@ -28,6 +28,11 @@ export class DeviceEntity extends AbstractEntity<DeviceDto> {
   })
   deviceTuyaUuid: string;

+  @Column({
+    nullable: true,
+  })
+  deviceTuyaConstUuid: string;
+
   @Column({
     nullable: true,
     default: true,
@@ -78,8 +83,8 @@ export class DeviceEntity extends AbstractEntity<DeviceDto> {
   @OneToMany(() => SceneDeviceEntity, (sceneDevice) => sceneDevice.device, {})
   sceneDevices: SceneDeviceEntity[];

-  @OneToMany(() => NewTagEntity, (tag) => tag.devices)
-  // @JoinTable({ name: 'device_tags' })
+  @ManyToOne(() => NewTagEntity, (tag) => tag.devices)
+  @JoinColumn({ name: 'tag_uuid' })
   public tag: NewTagEntity;
   @OneToMany(() => PowerClampHourlyEntity, (powerClamp) => powerClamp.device)
   powerClampHourly: PowerClampHourlyEntity[];
@@ -111,6 +116,7 @@ export class DeviceNotificationEntity extends AbstractEntity<DeviceNotificationD

   @ManyToOne(() => UserEntity, (user) => user.userPermission, {
     nullable: false,
+    onDelete: 'CASCADE',
   })
   user: UserEntity;

@@ -149,6 +155,7 @@ export class DeviceUserPermissionEntity extends AbstractEntity<DeviceUserPermiss

   @ManyToOne(() => UserEntity, (user) => user.userPermission, {
     nullable: false,
+    onDelete: 'CASCADE',
   })
   user: UserEntity;
   constructor(partial: Partial<DeviceUserPermissionEntity>) {
libs/common/src/modules/occupancy/dtos/index.ts (new file, 1 line)
@@ -0,0 +1 @@
+export * from './occupancy.dto';
libs/common/src/modules/occupancy/dtos/occupancy.dto.ts (new file, 23 lines)
@@ -0,0 +1,23 @@
+import { IsNotEmpty, IsNumber, IsString } from 'class-validator';
+
+export class SpaceDailyOccupancyDurationDto {
+  @IsString()
+  @IsNotEmpty()
+  public uuid: string;
+
+  @IsString()
+  @IsNotEmpty()
+  public spaceUuid: string;
+
+  @IsString()
+  @IsNotEmpty()
+  public eventDate: string;
+
+  @IsNumber()
+  @IsNotEmpty()
+  public occupancyPercentage: number;
+
+  @IsNumber()
+  @IsNotEmpty()
+  public occupiedSeconds: number;
+}
libs/common/src/modules/occupancy/entities/index.ts (new file, 1 line)
@@ -0,0 +1 @@
+export * from './occupancy.entity';
(new file, 32 lines)
@@ -0,0 +1,32 @@
+import { Column, Entity, ManyToOne, Unique } from 'typeorm';
+import { AbstractEntity } from '../../abstract/entities/abstract.entity';
+import { SpaceEntity } from '../../space/entities/space.entity';
+import { SpaceDailyOccupancyDurationDto } from '../dtos';
+
+@Entity({ name: 'space-daily-occupancy-duration' })
+@Unique(['spaceUuid', 'eventDate'])
+export class SpaceDailyOccupancyDurationEntity extends AbstractEntity<SpaceDailyOccupancyDurationDto> {
+  @Column({ nullable: false })
+  public spaceUuid: string;
+
+  @Column({ nullable: false, type: 'date' })
+  public eventDate: string;
+
+  public CountTotalPresenceDetected: number;
+
+  @ManyToOne(() => SpaceEntity, (space) => space.presenceSensorDaily)
+  space: SpaceEntity;
+
+  @Column({ type: 'int' })
+  occupancyPercentage: number;
+
+  @Column({ type: 'int', nullable: true })
+  occupiedSeconds?: number;
+
+  @Column({ type: 'int', nullable: true })
+  deviceCount?: number;
+
+  constructor(partial: Partial<SpaceDailyOccupancyDurationEntity>) {
+    super();
+    Object.assign(this, partial);
+  }
+}
(new file, 11 lines)
@@ -0,0 +1,11 @@
+import { Module } from '@nestjs/common';
+import { TypeOrmModule } from '@nestjs/typeorm';
+import { SpaceDailyOccupancyDurationEntity } from './entities/occupancy.entity';
+
+@Module({
+  providers: [],
+  exports: [],
+  controllers: [],
+  imports: [TypeOrmModule.forFeature([SpaceDailyOccupancyDurationEntity])],
+})
+export class SpaceDailyOccupancyDurationRepositoryModule {}
libs/common/src/modules/occupancy/repositories/index.ts (new file, 1 line)
@@ -0,0 +1 @@
+export * from './occupancy.repository';
(new file, 10 lines)
@@ -0,0 +1,10 @@
+import { DataSource, Repository } from 'typeorm';
+import { Injectable } from '@nestjs/common';
+import { SpaceDailyOccupancyDurationEntity } from '../entities/occupancy.entity';
+
+@Injectable()
+export class SpaceDailyOccupancyDurationEntityRepository extends Repository<SpaceDailyOccupancyDurationEntity> {
+  constructor(private dataSource: DataSource) {
+    super(SpaceDailyOccupancyDurationEntity, dataSource.createEntityManager());
+  }
+}
@@ -1,10 +1,7 @@
 import { Column, Entity, OneToMany } from 'typeorm';
-import { ProductDto } from '../dtos';
 import { AbstractEntity } from '../../abstract/entities/abstract.entity';
 import { DeviceEntity } from '../../device/entities';
-import { TagModel } from '../../space-model';
-import { TagEntity } from '../../space/entities/tag.entity';
-import { NewTagEntity } from '../../tag/entities';
+import { ProductDto } from '../dtos';

 @Entity({ name: 'product' })
 export class ProductEntity extends AbstractEntity<ProductDto> {
   @Column({
@@ -28,15 +25,6 @@ export class ProductEntity extends AbstractEntity<ProductDto> {
   })
   public prodType: string;

-  @OneToMany(() => NewTagEntity, (tag) => tag.product, { cascade: true })
-  public newTags: NewTagEntity[];
-
-  @OneToMany(() => TagModel, (tag) => tag.product)
-  tagModels: TagModel[];
-
-  @OneToMany(() => TagEntity, (tag) => tag.product)
-  tags: TagEntity[];
-
   @OneToMany(
     () => DeviceEntity,
     (devicesProductEntity) => devicesProductEntity.productDevice,
|
|||||||
nullable: false,
|
nullable: false,
|
||||||
enum: Object.values(RoleType),
|
enum: Object.values(RoleType),
|
||||||
})
|
})
|
||||||
|
// why is this ts-type string not enum?
|
||||||
type: string;
|
type: string;
|
||||||
@OneToMany(() => UserEntity, (inviteUser) => inviteUser.roleType, {
|
@OneToMany(() => UserEntity, (inviteUser) => inviteUser.roleType, {
|
||||||
nullable: true,
|
nullable: true,
|
||||||
|
@ -1,21 +0,0 @@
|
|||||||
import { IsNotEmpty, IsString } from 'class-validator';
|
|
||||||
|
|
||||||
export class TagModelDto {
|
|
||||||
@IsString()
|
|
||||||
@IsNotEmpty()
|
|
||||||
public uuid: string;
|
|
||||||
|
|
||||||
@IsString()
|
|
||||||
@IsNotEmpty()
|
|
||||||
public name: string;
|
|
||||||
|
|
||||||
@IsString()
|
|
||||||
@IsNotEmpty()
|
|
||||||
public productUuid: string;
|
|
||||||
|
|
||||||
@IsString()
|
|
||||||
spaceModelUuid: string;
|
|
||||||
|
|
||||||
@IsString()
|
|
||||||
subspaceModelUuid: string;
|
|
||||||
}
|
|
@@ -1,4 +1,3 @@
+export * from './space-model-product-allocation.entity';
 export * from './space-model.entity';
 export * from './subspace-model';
-export * from './tag-model.entity';
-export * from './space-model-product-allocation.entity';
@@ -1,18 +1,12 @@
-import {
-  Entity,
-  Column,
-  ManyToOne,
-  ManyToMany,
-  JoinTable,
-  OneToMany,
-} from 'typeorm';
-import { SpaceModelEntity } from './space-model.entity';
-import { NewTagEntity } from '../../tag/entities/tag.entity';
+import { Column, Entity, ManyToOne, OneToMany, Unique } from 'typeorm';
+import { AbstractEntity } from '../../abstract/entities/abstract.entity';
 import { ProductEntity } from '../../product/entities/product.entity';
 import { SpaceProductAllocationEntity } from '../../space/entities/space-product-allocation.entity';
-import { AbstractEntity } from '../../abstract/entities/abstract.entity';
+import { NewTagEntity } from '../../tag/entities/tag.entity';
+import { SpaceModelEntity } from './space-model.entity';

 @Entity({ name: 'space_model_product_allocation' })
+@Unique(['spaceModel', 'product', 'tag'])
 export class SpaceModelProductAllocationEntity extends AbstractEntity<SpaceModelProductAllocationEntity> {
   @Column({
     type: 'uuid',
@@ -31,9 +25,8 @@ export class SpaceModelProductAllocationEntity extends AbstractEntity<SpaceModel
   @ManyToOne(() => ProductEntity, { nullable: false, onDelete: 'CASCADE' })
   public product: ProductEntity;

-  @ManyToMany(() => NewTagEntity, { cascade: true, onDelete: 'CASCADE' })
-  @JoinTable({ name: 'space_model_product_tags' })
-  public tags: NewTagEntity[];
+  @ManyToOne(() => NewTagEntity, { nullable: true, onDelete: 'CASCADE' })
+  public tag: NewTagEntity;

   @OneToMany(
     () => SpaceProductAllocationEntity,
|
|||||||
import { Entity, Column, OneToMany, ManyToOne, JoinColumn } from 'typeorm';
|
import { Column, Entity, JoinColumn, ManyToOne, OneToMany } from 'typeorm';
|
||||||
import { AbstractEntity } from '../../abstract/entities/abstract.entity';
|
import { AbstractEntity } from '../../abstract/entities/abstract.entity';
|
||||||
import { SpaceModelDto } from '../dtos';
|
|
||||||
import { SubspaceModelEntity } from './subspace-model';
|
|
||||||
import { ProjectEntity } from '../../project/entities';
|
import { ProjectEntity } from '../../project/entities';
|
||||||
import { TagModel } from './tag-model.entity';
|
|
||||||
import { SpaceModelProductAllocationEntity } from './space-model-product-allocation.entity';
|
|
||||||
import { SpaceEntity } from '../../space/entities/space.entity';
|
import { SpaceEntity } from '../../space/entities/space.entity';
|
||||||
|
import { SpaceModelDto } from '../dtos';
|
||||||
|
import { SpaceModelProductAllocationEntity } from './space-model-product-allocation.entity';
|
||||||
|
import { SubspaceModelEntity } from './subspace-model';
|
||||||
|
|
||||||
@Entity({ name: 'space-model' })
|
@Entity({ name: 'space-model' })
|
||||||
export class SpaceModelEntity extends AbstractEntity<SpaceModelDto> {
|
export class SpaceModelEntity extends AbstractEntity<SpaceModelDto> {
|
||||||
@ -49,9 +48,6 @@ export class SpaceModelEntity extends AbstractEntity<SpaceModelDto> {
|
|||||||
})
|
})
|
||||||
public spaces: SpaceEntity[];
|
public spaces: SpaceEntity[];
|
||||||
|
|
||||||
@OneToMany(() => TagModel, (tag) => tag.spaceModel)
|
|
||||||
tags: TagModel[];
|
|
||||||
|
|
||||||
@OneToMany(
|
@OneToMany(
|
||||||
() => SpaceModelProductAllocationEntity,
|
() => SpaceModelProductAllocationEntity,
|
||||||
(allocation) => allocation.spaceModel,
|
(allocation) => allocation.spaceModel,
|
||||||
|
@ -1,11 +1,12 @@
|
|||||||
import { Entity, Column, ManyToOne, ManyToMany, JoinTable } from 'typeorm';
|
import { AbstractEntity } from '@app/common/modules/abstract/entities/abstract.entity';
|
||||||
import { SubspaceModelEntity } from './subspace-model.entity';
|
|
||||||
import { ProductEntity } from '@app/common/modules/product/entities/product.entity';
|
import { ProductEntity } from '@app/common/modules/product/entities/product.entity';
|
||||||
import { NewTagEntity } from '@app/common/modules/tag/entities/tag.entity';
|
import { NewTagEntity } from '@app/common/modules/tag/entities/tag.entity';
|
||||||
|
import { Column, Entity, ManyToOne, Unique } from 'typeorm';
|
||||||
import { SubspaceModelProductAllocationDto } from '../../dtos/subspace-model/subspace-model-product-allocation.dto';
|
import { SubspaceModelProductAllocationDto } from '../../dtos/subspace-model/subspace-model-product-allocation.dto';
|
||||||
import { AbstractEntity } from '@app/common/modules/abstract/entities/abstract.entity';
|
import { SubspaceModelEntity } from './subspace-model.entity';
|
||||||
|
|
||||||
@Entity({ name: 'subspace_model_product_allocation' })
|
@Entity({ name: 'subspace_model_product_allocation' })
|
||||||
|
@Unique(['subspaceModel', 'product', 'tag'])
|
||||||
export class SubspaceModelProductAllocationEntity extends AbstractEntity<SubspaceModelProductAllocationDto> {
|
export class SubspaceModelProductAllocationEntity extends AbstractEntity<SubspaceModelProductAllocationDto> {
|
||||||
@Column({
|
@Column({
|
||||||
type: 'uuid',
|
type: 'uuid',
|
||||||
@ -27,12 +28,8 @@ export class SubspaceModelProductAllocationEntity extends AbstractEntity<Subspac
|
|||||||
@ManyToOne(() => ProductEntity, { nullable: false, onDelete: 'CASCADE' })
|
@ManyToOne(() => ProductEntity, { nullable: false, onDelete: 'CASCADE' })
|
||||||
public product: ProductEntity;
|
public product: ProductEntity;
|
||||||
|
|
||||||
@ManyToMany(() => NewTagEntity, (tag) => tag.subspaceModelAllocations, {
|
@ManyToOne(() => NewTagEntity, { nullable: true, onDelete: 'CASCADE' })
|
||||||
cascade: true,
|
public tag: NewTagEntity;
|
||||||
onDelete: 'CASCADE',
|
|
||||||
})
|
|
||||||
@JoinTable({ name: 'subspace_model_product_tags' })
|
|
||||||
public tags: NewTagEntity[];
|
|
||||||
|
|
||||||
constructor(partial: Partial<SubspaceModelProductAllocationEntity>) {
|
constructor(partial: Partial<SubspaceModelProductAllocationEntity>) {
|
||||||
super();
|
super();
|
||||||
|
@ -1,10 +1,9 @@
|
|||||||
import { AbstractEntity } from '@app/common/modules/abstract/entities/abstract.entity';
|
import { AbstractEntity } from '@app/common/modules/abstract/entities/abstract.entity';
|
||||||
|
import { SubspaceEntity } from '@app/common/modules/space/entities/subspace/subspace.entity';
|
||||||
import { Column, Entity, ManyToOne, OneToMany } from 'typeorm';
|
import { Column, Entity, ManyToOne, OneToMany } from 'typeorm';
|
||||||
import { SubSpaceModelDto } from '../../dtos';
|
import { SubSpaceModelDto } from '../../dtos';
|
||||||
import { SpaceModelEntity } from '../space-model.entity';
|
import { SpaceModelEntity } from '../space-model.entity';
|
||||||
import { TagModel } from '../tag-model.entity';
|
|
||||||
import { SubspaceModelProductAllocationEntity } from './subspace-model-product-allocation.entity';
|
import { SubspaceModelProductAllocationEntity } from './subspace-model-product-allocation.entity';
|
||||||
import { SubspaceEntity } from '@app/common/modules/space/entities/subspace/subspace.entity';
|
|
||||||
|
|
||||||
@Entity({ name: 'subspace-model' })
|
@Entity({ name: 'subspace-model' })
|
||||||
export class SubspaceModelEntity extends AbstractEntity<SubSpaceModelDto> {
|
export class SubspaceModelEntity extends AbstractEntity<SubSpaceModelDto> {
|
||||||
@ -41,9 +40,6 @@ export class SubspaceModelEntity extends AbstractEntity<SubSpaceModelDto> {
|
|||||||
})
|
})
|
||||||
public disabled: boolean;
|
public disabled: boolean;
|
||||||
|
|
||||||
@OneToMany(() => TagModel, (tag) => tag.subspaceModel)
|
|
||||||
tags: TagModel[];
|
|
||||||
|
|
||||||
@OneToMany(
|
@OneToMany(
|
||||||
() => SubspaceModelProductAllocationEntity,
|
() => SubspaceModelProductAllocationEntity,
|
||||||
(allocation) => allocation.subspaceModel,
|
(allocation) => allocation.subspaceModel,
|
||||||
|
@@ -1,38 +0,0 @@
-import { Column, Entity, JoinColumn, ManyToOne, OneToMany } from 'typeorm';
-import { AbstractEntity } from '../../abstract/entities/abstract.entity';
-import { TagModelDto } from '../dtos/tag-model.dto';
-import { SpaceModelEntity } from './space-model.entity';
-import { SubspaceModelEntity } from './subspace-model';
-import { ProductEntity } from '../../product/entities';
-import { TagEntity } from '../../space/entities/tag.entity';
-
-@Entity({ name: 'tag_model' })
-export class TagModel extends AbstractEntity<TagModelDto> {
-  @Column({ type: 'varchar', length: 255 })
-  tag: string;
-
-  @ManyToOne(() => ProductEntity, (product) => product.tagModels, {
-    nullable: false,
-  })
-  @JoinColumn({ name: 'product_id' })
-  product: ProductEntity;
-
-  @ManyToOne(() => SpaceModelEntity, (space) => space.tags, { nullable: true })
-  @JoinColumn({ name: 'space_model_id' })
-  spaceModel: SpaceModelEntity;
-
-  @ManyToOne(() => SubspaceModelEntity, (subspace) => subspace.tags, {
-    nullable: true,
-  })
-  @JoinColumn({ name: 'subspace_model_id' })
-  subspaceModel: SubspaceModelEntity;
-
-  @Column({
-    nullable: false,
-    default: false,
-  })
-  public disabled: boolean;
-
-  @OneToMany(() => TagEntity, (tag) => tag.model)
-  tags: TagEntity[];
-}
@@ -1,11 +1,10 @@
-import { DataSource, Repository } from 'typeorm';
 import { Injectable } from '@nestjs/common';
+import { DataSource, Repository } from 'typeorm';
 import {
   SpaceModelEntity,
   SpaceModelProductAllocationEntity,
   SubspaceModelEntity,
   SubspaceModelProductAllocationEntity,
-  TagModel,
 } from '../entities';

 @Injectable()
@@ -21,13 +20,6 @@ export class SubspaceModelRepository extends Repository<SubspaceModelEntity> {
   }
 }

-@Injectable()
-export class TagModelRepository extends Repository<TagModel> {
-  constructor(private dataSource: DataSource) {
-    super(TagModel, dataSource.createEntityManager());
-  }
-}
-
 @Injectable()
 export class SpaceModelProductAllocationRepoitory extends Repository<SpaceModelProductAllocationEntity> {
   constructor(private dataSource: DataSource) {
@@ -1,13 +1,11 @@
-import { TypeOrmModule } from '@nestjs/typeorm';
-import { SpaceModelEntity, SubspaceModelEntity, TagModel } from './entities';
 import { Module } from '@nestjs/common';
+import { TypeOrmModule } from '@nestjs/typeorm';
+import { SpaceModelEntity, SubspaceModelEntity } from './entities';

 @Module({
   providers: [],
   exports: [],
   controllers: [],
-  imports: [
-    TypeOrmModule.forFeature([SpaceModelEntity, SubspaceModelEntity, TagModel]),
-  ],
+  imports: [TypeOrmModule.forFeature([SpaceModelEntity, SubspaceModelEntity])],
 })
 export class SpaceModelRepositoryModule {}
@@ -1,32 +0,0 @@
-import { Column, Entity, JoinColumn, ManyToOne } from 'typeorm';
-import { AbstractEntity } from '../../abstract/entities/abstract.entity';
-import { SpaceEntity } from './space.entity';
-import { Direction } from '@app/common/constants/direction.enum';
-
-@Entity({ name: 'space-link' })
-export class SpaceLinkEntity extends AbstractEntity {
-  @ManyToOne(() => SpaceEntity, { nullable: false, onDelete: 'CASCADE' })
-  @JoinColumn({ name: 'start_space_id' })
-  public startSpace: SpaceEntity;
-
-  @ManyToOne(() => SpaceEntity, { nullable: false, onDelete: 'CASCADE' })
-  @JoinColumn({ name: 'end_space_id' })
-  public endSpace: SpaceEntity;
-
-  @Column({
-    nullable: false,
-    default: false,
-  })
-  public disabled: boolean;
-
-  @Column({
-    nullable: false,
-    enum: Object.values(Direction),
-  })
-  direction: string;
-
-  constructor(partial: Partial<SpaceLinkEntity>) {
-    super();
-    Object.assign(this, partial);
-  }
-}
@@ -1,12 +1,13 @@
-import { Entity, Column, ManyToOne, ManyToMany, JoinTable } from 'typeorm';
-import { SpaceEntity } from './space.entity';
-import { SpaceModelProductAllocationEntity } from '../../space-model/entities/space-model-product-allocation.entity';
-import { ProductEntity } from '../../product/entities/product.entity';
-import { NewTagEntity } from '../../tag/entities/tag.entity';
+import { Column, Entity, ManyToOne, Unique } from 'typeorm';
 import { AbstractEntity } from '../../abstract/entities/abstract.entity';
+import { ProductEntity } from '../../product/entities/product.entity';
+import { SpaceModelProductAllocationEntity } from '../../space-model/entities/space-model-product-allocation.entity';
+import { NewTagEntity } from '../../tag/entities/tag.entity';
 import { SpaceProductAllocationDto } from '../dtos/space-product-allocation.dto';
+import { SpaceEntity } from './space.entity';

 @Entity({ name: 'space_product_allocation' })
+@Unique(['space', 'product', 'tag'], {})
 export class SpaceProductAllocationEntity extends AbstractEntity<SpaceProductAllocationDto> {
   @Column({
     type: 'uuid',
@@ -30,9 +31,8 @@ export class SpaceProductAllocationEntity extends AbstractEntity<SpaceProductAll
   @ManyToOne(() => ProductEntity, { nullable: false, onDelete: 'CASCADE' })
   public product: ProductEntity;

-  @ManyToMany(() => NewTagEntity)
-  @JoinTable({ name: 'space_product_tags' })
-  public tags: NewTagEntity[];
+  @ManyToOne(() => NewTagEntity, { nullable: true, onDelete: 'CASCADE' })
+  public tag: NewTagEntity;

   constructor(partial: Partial<SpaceProductAllocationEntity>) {
     super();
@@ -1,16 +1,26 @@
-import { Column, Entity, JoinColumn, ManyToOne, OneToMany } from 'typeorm';
-import { SpaceDto } from '../dtos';
+import {
+  Column,
+  Entity,
+  JoinColumn,
+  ManyToOne,
+  OneToMany,
+  OneToOne,
+} from 'typeorm';
 import { AbstractEntity } from '../../abstract/entities/abstract.entity';
-import { UserSpaceEntity } from '../../user/entities';
-import { DeviceEntity } from '../../device/entities';
+import { AqiSpaceDailyPollutantStatsEntity } from '../../aqi/entities';
+import { BookableSpaceEntity } from '../../booking/entities/bookable-space.entity';
 import { CommunityEntity } from '../../community/entities';
-import { SpaceLinkEntity } from './space-link.entity';
+import { DeviceEntity } from '../../device/entities';
+import { InviteUserSpaceEntity } from '../../Invite-user/entities';
+import { SpaceDailyOccupancyDurationEntity } from '../../occupancy/entities';
+import { PresenceSensorDailySpaceEntity } from '../../presence-sensor/entities';
 import { SceneEntity } from '../../scene/entities';
 import { SpaceModelEntity } from '../../space-model';
-import { InviteUserSpaceEntity } from '../../Invite-user/entities';
+import { UserSpaceEntity } from '../../user/entities';
+import { SpaceDto } from '../dtos';
 import { SpaceProductAllocationEntity } from './space-product-allocation.entity';
 import { SubspaceEntity } from './subspace/subspace.entity';
-import { PresenceSensorDailySpaceEntity } from '../../presence-sensor/entities';
+import { BookingEntity } from '../../booking/entities/booking.entity';

 @Entity({ name: 'space' })
 export class SpaceEntity extends AbstractEntity<SpaceDto> {
@@ -55,6 +65,12 @@ export class SpaceEntity extends AbstractEntity<SpaceDto> {
   })
   public disabled: boolean;

+  @Column({
+    nullable: true,
+    type: Number,
+  })
+  public order?: number;
+
   @OneToMany(() => SubspaceEntity, (subspace) => subspace.space, {
     nullable: true,
   })
@@ -73,16 +89,6 @@ export class SpaceEntity extends AbstractEntity<SpaceDto> {
   )
   devices: DeviceEntity[];

-  @OneToMany(() => SpaceLinkEntity, (connection) => connection.startSpace, {
-    nullable: true,
-  })
-  public outgoingConnections: SpaceLinkEntity[];
-
-  @OneToMany(() => SpaceLinkEntity, (connection) => connection.endSpace, {
-    nullable: true,
-  })
-  public incomingConnections: SpaceLinkEntity[];
-
   @Column({
     nullable: true,
     type: 'text',
@@ -115,6 +121,21 @@ export class SpaceEntity extends AbstractEntity<SpaceDto> {
   @OneToMany(() => PresenceSensorDailySpaceEntity, (sensor) => sensor.space)
   presenceSensorDaily: PresenceSensorDailySpaceEntity[];

+  @OneToMany(() => AqiSpaceDailyPollutantStatsEntity, (aqi) => aqi.space)
+  aqiSensorDaily: AqiSpaceDailyPollutantStatsEntity[];
+
+  @OneToMany(
+    () => SpaceDailyOccupancyDurationEntity,
+    (occupancy) => occupancy.space,
+  )
+  occupancyDaily: SpaceDailyOccupancyDurationEntity[];
+
+  @OneToOne(() => BookableSpaceEntity, (bookable) => bookable.space)
+  bookableConfig: BookableSpaceEntity;
+
+  @OneToMany(() => BookingEntity, (booking) => booking.space)
+  bookings: BookingEntity[];
+
   constructor(partial: Partial<SpaceEntity>) {
     super();
     Object.assign(this, partial);
@@ -1,20 +1,13 @@
-import {
-  Entity,
-  Column,
-  ManyToOne,
-  ManyToMany,
-  JoinTable,
-  Unique,
-} from 'typeorm';
-import { SubspaceEntity } from './subspace.entity';
+import { AbstractEntity } from '@app/common/modules/abstract/entities/abstract.entity';
 import { ProductEntity } from '@app/common/modules/product/entities';
 import { SubspaceModelProductAllocationEntity } from '@app/common/modules/space-model';
 import { NewTagEntity } from '@app/common/modules/tag/entities/tag.entity';
-import { AbstractEntity } from '@app/common/modules/abstract/entities/abstract.entity';
+import { Column, Entity, ManyToOne, Unique } from 'typeorm';
 import { SubspaceProductAllocationDto } from '../../dtos/subspace-product-allocation.dto';
+import { SubspaceEntity } from './subspace.entity';

 @Entity({ name: 'subspace_product_allocation' })
-@Unique(['subspace', 'product'])
+@Unique(['subspace', 'product', 'tag'])
 export class SubspaceProductAllocationEntity extends AbstractEntity<SubspaceProductAllocationDto> {
   @Column({
     type: 'uuid',
@@ -38,9 +31,8 @@ export class SubspaceProductAllocationEntity extends AbstractEntity<SubspaceProd
   @ManyToOne(() => ProductEntity, { nullable: false, onDelete: 'CASCADE' })
   public product: ProductEntity;

-  @ManyToMany(() => NewTagEntity)
-  @JoinTable({ name: 'subspace_product_tags' })
-  public tags: NewTagEntity[];
+  @ManyToOne(() => NewTagEntity, { nullable: true, onDelete: 'CASCADE' })
+  public tag: NewTagEntity;

   constructor(partial: Partial<SubspaceProductAllocationEntity>) {
     super();
@@ -4,7 +4,6 @@ import { SubspaceModelEntity } from '@app/common/modules/space-model';
 import { Column, Entity, JoinColumn, ManyToOne, OneToMany } from 'typeorm';
 import { SubspaceDto } from '../../dtos';
 import { SpaceEntity } from '../space.entity';
-import { TagEntity } from '../tag.entity';
 import { SubspaceProductAllocationEntity } from './subspace-product-allocation.entity';

 @Entity({ name: 'subspace' })
@@ -43,9 +42,6 @@ export class SubspaceEntity extends AbstractEntity<SubspaceDto> {
   })
   subSpaceModel?: SubspaceModelEntity;

-  @OneToMany(() => TagEntity, (tag) => tag.subspace)
-  tags: TagEntity[];
-
   @OneToMany(
     () => SubspaceProductAllocationEntity,
     (allocation) => allocation.subspace,
@@ -1,41 +0,0 @@
-import { Entity, Column, ManyToOne, JoinColumn, OneToOne } from 'typeorm';
-import { AbstractEntity } from '../../abstract/entities/abstract.entity';
-import { ProductEntity } from '../../product/entities';
-import { TagDto } from '../dtos';
-import { TagModel } from '../../space-model/entities/tag-model.entity';
-import { DeviceEntity } from '../../device/entities';
-import { SubspaceEntity } from './subspace/subspace.entity';
-
-@Entity({ name: 'tag' })
-export class TagEntity extends AbstractEntity<TagDto> {
-  @Column({ type: 'varchar', length: 255, nullable: true })
-  tag: string;
-
-  @ManyToOne(() => TagModel, (model) => model.tags, {
-    nullable: true,
-  })
-  model: TagModel;
-
-  @ManyToOne(() => ProductEntity, (product) => product.tags, {
-    nullable: false,
-  })
-  product: ProductEntity;
-
-  @ManyToOne(() => SubspaceEntity, (subspace) => subspace.tags, {
-    nullable: true,
-  })
-  @JoinColumn({ name: 'subspace_id' })
-  subspace: SubspaceEntity;
-
-  @Column({
-    nullable: false,
-    default: false,
-  })
-  public disabled: boolean;
-
-  @OneToOne(() => DeviceEntity, (device) => device.tag, {
-    nullable: true,
-  })
-  @JoinColumn({ name: 'device_id' })
-  device: DeviceEntity;
-}
@@ -1,10 +1,8 @@
-import { DataSource, Repository } from 'typeorm';
 import { Injectable } from '@nestjs/common';
+import { DataSource, Repository } from 'typeorm';
 import { InviteSpaceEntity } from '../entities/invite-space.entity';
-import { SpaceLinkEntity } from '../entities/space-link.entity';
-import { SpaceEntity } from '../entities/space.entity';
-import { TagEntity } from '../entities/tag.entity';
 import { SpaceProductAllocationEntity } from '../entities/space-product-allocation.entity';
+import { SpaceEntity } from '../entities/space.entity';

 @Injectable()
 export class SpaceRepository extends Repository<SpaceEntity> {
@@ -13,20 +11,6 @@ export class SpaceRepository extends Repository<SpaceEntity> {
   }
 }

-@Injectable()
-export class SpaceLinkRepository extends Repository<SpaceLinkEntity> {
-  constructor(private dataSource: DataSource) {
-    super(SpaceLinkEntity, dataSource.createEntityManager());
-  }
-}
-
-@Injectable()
-export class TagRepository extends Repository<TagEntity> {
-  constructor(private dataSource: DataSource) {
-    super(TagEntity, dataSource.createEntityManager());
-  }
-}
-
 @Injectable()
 export class InviteSpaceRepository extends Repository<InviteSpaceEntity> {
   constructor(private dataSource: DataSource) {
@@ -6,7 +6,6 @@ import { SpaceProductAllocationEntity } from './entities/space-product-allocatio
 import { SpaceEntity } from './entities/space.entity';
 import { SubspaceProductAllocationEntity } from './entities/subspace/subspace-product-allocation.entity';
 import { SubspaceEntity } from './entities/subspace/subspace.entity';
-import { TagEntity } from './entities/tag.entity';

 @Module({
   providers: [],
@@ -16,7 +15,6 @@ import { TagEntity } from './entities/tag.entity';
     TypeOrmModule.forFeature([
       SpaceEntity,
       SubspaceEntity,
-      TagEntity,
       InviteSpaceEntity,
       SpaceProductAllocationEntity,
       SubspaceProductAllocationEntity,
@@ -1,11 +1,10 @@
-import { Entity, Column, ManyToOne, Unique, ManyToMany } from 'typeorm';
-import { ProductEntity } from '../../product/entities';
-import { ProjectEntity } from '../../project/entities';
+import { Column, Entity, ManyToOne, OneToMany, Unique } from 'typeorm';
 import { AbstractEntity } from '../../abstract/entities/abstract.entity';
-import { NewTagDto } from '../dtos/tag.dto';
+import { DeviceEntity } from '../../device/entities/device.entity';
+import { ProjectEntity } from '../../project/entities';
 import { SpaceModelProductAllocationEntity } from '../../space-model/entities/space-model-product-allocation.entity';
 import { SubspaceModelProductAllocationEntity } from '../../space-model/entities/subspace-model/subspace-model-product-allocation.entity';
-import { DeviceEntity } from '../../device/entities/device.entity';
+import { NewTagDto } from '../dtos/tag.dto';

 @Entity({ name: 'new_tag' })
 @Unique(['name', 'project'])
@@ -24,31 +23,25 @@ export class NewTagEntity extends AbstractEntity<NewTagDto> {
   })
   name: string;

-  @ManyToOne(() => ProductEntity, (product) => product.newTags, {
-    nullable: false,
-    onDelete: 'CASCADE',
-  })
-  public product: ProductEntity;
-
   @ManyToOne(() => ProjectEntity, (project) => project.tags, {
     nullable: false,
     onDelete: 'CASCADE',
   })
   public project: ProjectEntity;

-  @ManyToMany(
+  @OneToMany(
     () => SpaceModelProductAllocationEntity,
-    (allocation) => allocation.tags,
+    (allocation) => allocation.tag,
   )
   public spaceModelAllocations: SpaceModelProductAllocationEntity[];

-  @ManyToMany(
+  @OneToMany(
     () => SubspaceModelProductAllocationEntity,
-    (allocation) => allocation.tags,
+    (allocation) => allocation.tag,
   )
   public subspaceModelAllocations: SubspaceModelProductAllocationEntity[];

-  @ManyToOne(() => DeviceEntity, (device) => device.tag)
+  @OneToMany(() => DeviceEntity, (device) => device.tag)
   public devices: DeviceEntity[];

   constructor(partial: Partial<NewTagEntity>) {
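Illustrative sketch only (not part of the diff above): with each allocation now holding a single ManyToOne tag, NewTagEntity exposes the inverse OneToMany collections, so a tag and its allocations could be loaded together roughly like this; the repository variable and uuid value are assumptions, not code from this repository.

// Hypothetical usage; `newTagRepository` and `tagUuid` are placeholders.
const tag = await newTagRepository.findOne({
  where: { uuid: tagUuid },
  relations: {
    spaceModelAllocations: true,
    subspaceModelAllocations: true,
  },
});
// Each loaded allocation references exactly one tag through its ManyToOne `tag` column.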
@@ -1,3 +1,4 @@
+import { defaultProfilePicture } from '@app/common/constants/default.profile.picture';
 import {
   Column,
   DeleteDateColumn,
@@ -8,27 +9,27 @@
   OneToOne,
   Unique,
 } from 'typeorm';
+import { OtpType } from '../../../../src/constants/otp-type.enum';
+import { AbstractEntity } from '../../abstract/entities/abstract.entity';
+import { ClientEntity } from '../../client/entities';
+import {
+  DeviceNotificationEntity,
+  DeviceUserPermissionEntity,
+} from '../../device/entities';
+import { InviteUserEntity } from '../../Invite-user/entities';
+import { ProjectEntity } from '../../project/entities';
+import { RegionEntity } from '../../region/entities';
+import { RoleTypeEntity } from '../../role-type/entities';
+import { SpaceEntity } from '../../space/entities/space.entity';
+import { TimeZoneEntity } from '../../timezone/entities';
+import { VisitorPasswordEntity } from '../../visitor-password/entities';
 import {
   UserDto,
   UserNotificationDto,
   UserOtpDto,
   UserSpaceDto,
 } from '../dtos';
-import { AbstractEntity } from '../../abstract/entities/abstract.entity';
-import {
-  DeviceNotificationEntity,
-  DeviceUserPermissionEntity,
-} from '../../device/entities';
-import { defaultProfilePicture } from '@app/common/constants/default.profile.picture';
-import { RegionEntity } from '../../region/entities';
-import { TimeZoneEntity } from '../../timezone/entities';
-import { OtpType } from '../../../../src/constants/otp-type.enum';
-import { RoleTypeEntity } from '../../role-type/entities';
-import { VisitorPasswordEntity } from '../../visitor-password/entities';
-import { InviteUserEntity } from '../../Invite-user/entities';
-import { ProjectEntity } from '../../project/entities';
-import { SpaceEntity } from '../../space/entities/space.entity';
-import { ClientEntity } from '../../client/entities';
+import { BookingEntity } from '../../booking/entities/booking.entity';

 @Entity({ name: 'user' })
 export class UserEntity extends AbstractEntity<UserDto> {
@@ -82,6 +83,12 @@ export class UserEntity extends AbstractEntity<UserDto> {
   })
   public isActive: boolean;

+  @Column({
+    nullable: true,
+    type: Number,
+  })
+  public bookingPoints?: number;
+
   @Column({ default: false })
   hasAcceptedWebAgreement: boolean;

@@ -94,7 +101,9 @@ export class UserEntity extends AbstractEntity<UserDto> {
   @Column({ type: 'timestamp', nullable: true })
   appAgreementAcceptedAt: Date;

-  @OneToMany(() => UserSpaceEntity, (userSpace) => userSpace.user)
+  @OneToMany(() => UserSpaceEntity, (userSpace) => userSpace.user, {
+    onDelete: 'CASCADE',
+  })
   userSpaces: UserSpaceEntity[];

   @OneToMany(
@@ -113,6 +122,9 @@ export class UserEntity extends AbstractEntity<UserDto> {
   )
   deviceUserNotification: DeviceNotificationEntity[];

+  @OneToMany(() => BookingEntity, (booking) => booking.user)
+  bookings: BookingEntity[];
+
   @ManyToOne(() => RegionEntity, (region) => region.users, { nullable: true })
   region: RegionEntity;
   @ManyToOne(() => TimeZoneEntity, (timezone) => timezone.users, {
@@ -158,6 +170,7 @@ export class UserEntity extends AbstractEntity<UserDto> {
 export class UserNotificationEntity extends AbstractEntity<UserNotificationDto> {
   @ManyToOne(() => UserEntity, (user) => user.roleType, {
     nullable: false,
+    onDelete: 'CASCADE',
   })
   user: UserEntity;
   @Column({
@@ -219,7 +232,10 @@ export class UserSpaceEntity extends AbstractEntity<UserSpaceDto> {
   })
   public uuid: string;

-  @ManyToOne(() => UserEntity, (user) => user.userSpaces, { nullable: false })
+  @ManyToOne(() => UserEntity, (user) => user.userSpaces, {
+    nullable: false,
+    onDelete: 'CASCADE',
+  })
   user: UserEntity;

   @ManyToOne(() => SpaceEntity, (space) => space.userSpaces, {
@@ -1,7 +1,7 @@
-import { Column, Entity, ManyToOne, JoinColumn, Index } from 'typeorm';
-import { VisitorPasswordDto } from '../dtos';
+import { Column, Entity, Index, JoinColumn, ManyToOne } from 'typeorm';
 import { AbstractEntity } from '../../abstract/entities/abstract.entity';
 import { UserEntity } from '../../user/entities/user.entity';
+import { VisitorPasswordDto } from '../dtos';

 @Entity({ name: 'visitor-password' })
 @Index('IDX_PASSWORD_TUYA_UUID', ['passwordTuyaUuid'])
@@ -14,6 +14,7 @@ export class VisitorPasswordEntity extends AbstractEntity<VisitorPasswordDto> {

   @ManyToOne(() => UserEntity, (user) => user.visitorPasswords, {
     nullable: false,
+    onDelete: 'CASCADE',
   })
   @JoinColumn({ name: 'authorizer_uuid' })
   public user: UserEntity;
@@ -61,6 +61,10 @@ export class SuperAdminSeeder {
       lastName: 'Admin',
       isUserVerified: true,
       isActive: true,
+      hasAcceptedAppAgreement: true,
+      hasAcceptedWebAgreement: true,
+      appAgreementAcceptedAt: new Date(),
+      webAgreementAcceptedAt: new Date(),
       roleType: { uuid: defaultUserRoleUuid },
     });
   } catch (err) {
@@ -0,0 +1,39 @@
WITH params AS (
    SELECT
        $1::uuid AS space_uuid,
        TO_DATE(NULLIF($2, ''), 'YYYY-MM') AS event_month
)

SELECT
    sdp.space_uuid,
    sdp.event_date,
    sdp.good_aqi_percentage, sdp.moderate_aqi_percentage, sdp.unhealthy_sensitive_aqi_percentage, sdp.unhealthy_aqi_percentage,
    sdp.very_unhealthy_aqi_percentage, sdp.hazardous_aqi_percentage,
    sdp.daily_avg_aqi, sdp.daily_max_aqi, sdp.daily_min_aqi,

    sdp.good_pm25_percentage, sdp.moderate_pm25_percentage, sdp.unhealthy_sensitive_pm25_percentage, sdp.unhealthy_pm25_percentage,
    sdp.very_unhealthy_pm25_percentage, sdp.hazardous_pm25_percentage,
    sdp.daily_avg_pm25, sdp.daily_max_pm25, sdp.daily_min_pm25,

    sdp.good_pm10_percentage, sdp.moderate_pm10_percentage, sdp.unhealthy_sensitive_pm10_percentage, sdp.unhealthy_pm10_percentage,
    sdp.very_unhealthy_pm10_percentage, sdp.hazardous_pm10_percentage,
    sdp.daily_avg_pm10, sdp.daily_max_pm10, sdp.daily_min_pm10,

    sdp.good_voc_percentage, sdp.moderate_voc_percentage, sdp.unhealthy_sensitive_voc_percentage, sdp.unhealthy_voc_percentage,
    sdp.very_unhealthy_voc_percentage, sdp.hazardous_voc_percentage,
    sdp.daily_avg_voc, sdp.daily_max_voc, sdp.daily_min_voc,

    sdp.good_co2_percentage, sdp.moderate_co2_percentage, sdp.unhealthy_sensitive_co2_percentage, sdp.unhealthy_co2_percentage,
    sdp.very_unhealthy_co2_percentage, sdp.hazardous_co2_percentage,
    sdp.daily_avg_co2, sdp.daily_max_co2, sdp.daily_min_co2,

    sdp.good_ch2o_percentage, sdp.moderate_ch2o_percentage, sdp.unhealthy_sensitive_ch2o_percentage, sdp.unhealthy_ch2o_percentage,
    sdp.very_unhealthy_ch2o_percentage, sdp.hazardous_ch2o_percentage,
    sdp.daily_avg_ch2o, sdp.daily_max_ch2o, sdp.daily_min_ch2o

FROM public."space-daily-pollutant-stats" AS sdp
CROSS JOIN params p
WHERE
    (p.space_uuid IS NULL OR sdp.space_uuid = p.space_uuid)
    AND (p.event_month IS NULL OR TO_CHAR(sdp.event_date, 'YYYY-MM') = TO_CHAR(p.event_month, 'YYYY-MM'))
ORDER BY sdp.space_uuid, sdp.event_date;
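Illustrative sketch only (not part of the diff): a parameterized query like the one above would typically be read from disk and run through the TypeORM data source, with $1 bound to a space uuid (or null for all spaces) and $2 to a 'YYYY-MM' month (an empty string disables the month filter). The file path and class name below are assumptions, not code from this repository.

// Hypothetical usage; file name and service are placeholders.
import * as fs from 'fs';
import { join } from 'path';
import { Injectable } from '@nestjs/common';
import { DataSource } from 'typeorm';

@Injectable()
export class SpacePollutantStatsReader {
  constructor(private readonly dataSource: DataSource) {}

  async fetchDailyStats(spaceUuid: string | null, month: string) {
    // $1 -> optional space uuid filter, $2 -> optional 'YYYY-MM' month filter.
    const sql = fs.readFileSync(
      join(__dirname, 'queries/fetch-daily-space-pollutant-stats.sql'),
      'utf8',
    );
    return this.dataSource.query(sql, [spaceUuid, month]);
  }
}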
@@ -0,0 +1,376 @@
WITH params AS (
    SELECT
        TO_DATE(NULLIF($1, ''), 'YYYY-MM-DD') AS event_date
),

-- Query Pipeline Starts Here
device_space AS (
    SELECT
        device.uuid AS device_id,
        device.space_device_uuid AS space_id,
        "device-status-log".event_time::timestamp AS event_time,
        "device-status-log".code,
        "device-status-log".value
    FROM device
    LEFT JOIN "device-status-log"
        ON device.uuid = "device-status-log".device_id
    LEFT JOIN product
        ON product.uuid = device.product_device_uuid
    WHERE product.cat_name = 'hjjcy'
),

average_pollutants AS (
    SELECT
        event_time::date AS event_date,
        date_trunc('hour', event_time) AS event_hour,
        space_id,

        -- PM1
        MIN(CASE WHEN code = 'pm1' THEN value::numeric END) AS pm1_min,
        AVG(CASE WHEN code = 'pm1' THEN value::numeric END) AS pm1_avg,
        MAX(CASE WHEN code = 'pm1' THEN value::numeric END) AS pm1_max,

        -- PM25
        MIN(CASE WHEN code = 'pm25_value' THEN value::numeric END) AS pm25_min,
        AVG(CASE WHEN code = 'pm25_value' THEN value::numeric END) AS pm25_avg,
        MAX(CASE WHEN code = 'pm25_value' THEN value::numeric END) AS pm25_max,

        -- PM10
        MIN(CASE WHEN code = 'pm10' THEN value::numeric END) AS pm10_min,
        AVG(CASE WHEN code = 'pm10' THEN value::numeric END) AS pm10_avg,
        MAX(CASE WHEN code = 'pm10' THEN value::numeric END) AS pm10_max,

        -- VOC
        MIN(CASE WHEN code = 'voc_value' THEN value::numeric END) AS voc_min,
        AVG(CASE WHEN code = 'voc_value' THEN value::numeric END) AS voc_avg,
        MAX(CASE WHEN code = 'voc_value' THEN value::numeric END) AS voc_max,

        -- CH2O
        MIN(CASE WHEN code = 'ch2o_value' THEN value::numeric END) AS ch2o_min,
        AVG(CASE WHEN code = 'ch2o_value' THEN value::numeric END) AS ch2o_avg,
        MAX(CASE WHEN code = 'ch2o_value' THEN value::numeric END) AS ch2o_max,

        -- CO2
        MIN(CASE WHEN code = 'co2_value' THEN value::numeric END) AS co2_min,
        AVG(CASE WHEN code = 'co2_value' THEN value::numeric END) AS co2_avg,
        MAX(CASE WHEN code = 'co2_value' THEN value::numeric END) AS co2_max

    FROM device_space
    GROUP BY space_id, event_hour, event_date
),

filled_pollutants AS (
    SELECT
        *,
        -- AVG
        COALESCE(pm25_avg, LAG(pm25_avg) OVER (PARTITION BY space_id ORDER BY event_hour)) AS pm25_avg_f,
        COALESCE(pm10_avg, LAG(pm10_avg) OVER (PARTITION BY space_id ORDER BY event_hour)) AS pm10_avg_f,
        COALESCE(voc_avg, LAG(voc_avg) OVER (PARTITION BY space_id ORDER BY event_hour)) AS voc_avg_f,
        COALESCE(co2_avg, LAG(co2_avg) OVER (PARTITION BY space_id ORDER BY event_hour)) AS co2_avg_f,
        COALESCE(ch2o_avg, LAG(ch2o_avg) OVER (PARTITION BY space_id ORDER BY event_hour)) AS ch2o_avg_f,

        -- MIN
        COALESCE(pm25_min, LAG(pm25_min) OVER (PARTITION BY space_id ORDER BY event_hour)) AS pm25_min_f,
        COALESCE(pm10_min, LAG(pm10_min) OVER (PARTITION BY space_id ORDER BY event_hour)) AS pm10_min_f,
        COALESCE(voc_min, LAG(voc_min) OVER (PARTITION BY space_id ORDER BY event_hour)) AS voc_min_f,
        COALESCE(co2_min, LAG(co2_min) OVER (PARTITION BY space_id ORDER BY event_hour)) AS co2_min_f,
        COALESCE(ch2o_min, LAG(ch2o_min) OVER (PARTITION BY space_id ORDER BY event_hour)) AS ch2o_min_f,

        -- MAX
        COALESCE(pm25_max, LAG(pm25_max) OVER (PARTITION BY space_id ORDER BY event_hour)) AS pm25_max_f,
        COALESCE(pm10_max, LAG(pm10_max) OVER (PARTITION BY space_id ORDER BY event_hour)) AS pm10_max_f,
        COALESCE(voc_max, LAG(voc_max) OVER (PARTITION BY space_id ORDER BY event_hour)) AS voc_max_f,
        COALESCE(co2_max, LAG(co2_max) OVER (PARTITION BY space_id ORDER BY event_hour)) AS co2_max_f,
        COALESCE(ch2o_max, LAG(ch2o_max) OVER (PARTITION BY space_id ORDER BY event_hour)) AS ch2o_max_f
    FROM average_pollutants
),

hourly_results AS (
    SELECT
        space_id,
        event_date,
        event_hour,
        pm1_min, pm1_avg, pm1_max,
        pm25_min_f, pm25_avg_f, pm25_max_f,
        pm10_min_f, pm10_avg_f, pm10_max_f,
        voc_min_f, voc_avg_f, voc_max_f,
        co2_min_f, co2_avg_f, co2_max_f,
        ch2o_min_f, ch2o_avg_f, ch2o_max_f,

        GREATEST(
            calculate_aqi('pm25', pm25_min_f),
            calculate_aqi('pm10', pm10_min_f)
        ) AS hourly_min_aqi,

        GREATEST(
            calculate_aqi('pm25', pm25_avg_f),
            calculate_aqi('pm10', pm10_avg_f)
        ) AS hourly_avg_aqi,

        GREATEST(
            calculate_aqi('pm25', pm25_max_f),
            calculate_aqi('pm10', pm10_max_f)
        ) AS hourly_max_aqi,

        classify_aqi(GREATEST(
            calculate_aqi('pm25', pm25_avg_f),
            calculate_aqi('pm10', pm10_avg_f)
        )) AS aqi_category,

        classify_aqi(calculate_aqi('pm25',pm25_avg_f)) as pm25_category,
        classify_aqi(calculate_aqi('pm10',pm10_avg_f)) as pm10_category,
        classify_aqi(calculate_aqi('voc',voc_avg_f)) as voc_category,
        classify_aqi(calculate_aqi('co2',co2_avg_f)) as co2_category,
        classify_aqi(calculate_aqi('ch2o',ch2o_avg_f)) as ch2o_category

    FROM filled_pollutants
),

daily_category_counts AS (
    SELECT space_id, event_date, aqi_category AS category, 'aqi' AS pollutant, COUNT(*) AS category_count
    FROM hourly_results
    GROUP BY space_id, event_date, aqi_category

    UNION ALL

    SELECT space_id, event_date, pm25_category AS category, 'pm25' AS pollutant, COUNT(*) AS category_count
    FROM hourly_results
    GROUP BY space_id, event_date, pm25_category

    UNION ALL

    SELECT space_id, event_date, pm10_category AS category, 'pm10' AS pollutant, COUNT(*) AS category_count
    FROM hourly_results
    GROUP BY space_id, event_date, pm10_category

    UNION ALL

    SELECT space_id, event_date, voc_category AS category, 'voc' AS pollutant, COUNT(*) AS category_count
    FROM hourly_results
    GROUP BY space_id, event_date, voc_category

    UNION ALL

    SELECT space_id, event_date, co2_category AS category, 'co2' AS pollutant, COUNT(*) AS category_count
    FROM hourly_results
    GROUP BY space_id, event_date, co2_category

    UNION ALL

    SELECT space_id, event_date, ch2o_category AS category, 'ch2o' AS pollutant, COUNT(*) AS category_count
    FROM hourly_results
    GROUP BY space_id, event_date, ch2o_category
),

daily_totals AS (
    SELECT
        space_id,
        event_date,
        SUM(category_count) AS total_count
    FROM daily_category_counts
    where pollutant = 'aqi'
    GROUP BY space_id, event_date
),

-- Pivot Categories into Columns
daily_percentages AS (
    select
        dt.space_id,
        dt.event_date,
        -- AQI CATEGORIES
        ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Good' and dcc.pollutant = 'aqi' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS good_aqi_percentage,
        ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Moderate' and dcc.pollutant = 'aqi' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS moderate_aqi_percentage,
        ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Unhealthy for Sensitive Groups' and dcc.pollutant = 'aqi' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS unhealthy_sensitive_aqi_percentage,
        ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Unhealthy' and dcc.pollutant = 'aqi' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS unhealthy_aqi_percentage,
        ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Very Unhealthy' and dcc.pollutant = 'aqi' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS very_unhealthy_aqi_percentage,
        ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Hazardous' and dcc.pollutant = 'aqi' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS hazardous_aqi_percentage,
        -- PM25 CATEGORIES
        ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Good' and dcc.pollutant = 'pm25' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS good_pm25_percentage,
        ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Moderate' and dcc.pollutant = 'pm25' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS moderate_pm25_percentage,
        ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Unhealthy for Sensitive Groups' and dcc.pollutant = 'pm25' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS unhealthy_sensitive_pm25_percentage,
        ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Unhealthy' and dcc.pollutant = 'pm25' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS unhealthy_pm25_percentage,
        ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Very Unhealthy' and dcc.pollutant = 'pm25' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS very_unhealthy_pm25_percentage,
        ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Hazardous' and dcc.pollutant = 'pm25' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS hazardous_pm25_percentage,
        -- PM10 CATEGORIES
        ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Good' and dcc.pollutant = 'pm10' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS good_pm10_percentage,
        ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Moderate' and dcc.pollutant = 'pm10' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS moderate_pm10_percentage,
        ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Unhealthy for Sensitive Groups' and dcc.pollutant = 'pm10' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS unhealthy_sensitive_pm10_percentage,
        ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Unhealthy' and dcc.pollutant = 'pm10' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS unhealthy_pm10_percentage,
        ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Very Unhealthy' and dcc.pollutant = 'pm10' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS very_unhealthy_pm10_percentage,
        ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Hazardous' and dcc.pollutant = 'pm10' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS hazardous_pm10_percentage,
        -- VOC CATEGORIES
        ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Good' and dcc.pollutant = 'voc' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS good_voc_percentage,
        ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Moderate' and dcc.pollutant = 'voc' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS moderate_voc_percentage,
        ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Unhealthy for Sensitive Groups' and dcc.pollutant = 'voc' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS unhealthy_sensitive_voc_percentage,
        ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Unhealthy' and dcc.pollutant = 'voc' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS unhealthy_voc_percentage,
        ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Very Unhealthy' and dcc.pollutant = 'voc' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS very_unhealthy_voc_percentage,
        ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Hazardous' and dcc.pollutant = 'voc' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS hazardous_voc_percentage,
        -- CO2 CATEGORIES
        ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Good' and dcc.pollutant = 'co2' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS good_co2_percentage,
        ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Moderate' and dcc.pollutant = 'co2' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS moderate_co2_percentage,
        ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Unhealthy for Sensitive Groups' and dcc.pollutant = 'co2' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS unhealthy_sensitive_co2_percentage,
        ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Unhealthy' and dcc.pollutant = 'co2' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS unhealthy_co2_percentage,
        ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Very Unhealthy' and dcc.pollutant = 'co2' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS very_unhealthy_co2_percentage,
        ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Hazardous' and dcc.pollutant = 'co2' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS hazardous_co2_percentage,
        -- CH20 CATEGORIES
        ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Good' and dcc.pollutant = 'ch2o' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS good_ch2o_percentage,
        ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Moderate' and dcc.pollutant = 'ch2o' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS moderate_ch2o_percentage,
        ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Unhealthy for Sensitive Groups' and dcc.pollutant = 'ch2o' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS unhealthy_sensitive_ch2o_percentage,
        ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Unhealthy' and dcc.pollutant = 'ch2o' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS unhealthy_ch2o_percentage,
        ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Very Unhealthy' and dcc.pollutant = 'ch2o' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS very_unhealthy_ch2o_percentage,
        ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Hazardous' and dcc.pollutant = 'ch2o' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS hazardous_ch2o_percentage
    FROM daily_totals dt
    LEFT JOIN daily_category_counts dcc
        ON dt.space_id = dcc.space_id AND dt.event_date = dcc.event_date
    GROUP BY dt.space_id, dt.event_date, dt.total_count
),

daily_averages AS (
    SELECT
        space_id,
        event_date,
        -- AQI
        ROUND(AVG(hourly_min_aqi)::numeric, 2) AS daily_min_aqi,
        ROUND(AVG(hourly_avg_aqi)::numeric, 2) AS daily_avg_aqi,
        ROUND(AVG(hourly_max_aqi)::numeric, 2) AS daily_max_aqi,
        -- PM25
        ROUND(AVG(pm25_min_f)::numeric, 2) AS daily_min_pm25,
        ROUND(AVG(pm25_avg_f)::numeric, 2) AS daily_avg_pm25,
        ROUND(AVG(pm25_max_f)::numeric, 2) AS daily_max_pm25,
        -- PM10
        ROUND(AVG(pm10_min_f)::numeric, 2) AS daily_min_pm10,
        ROUND(AVG(pm10_avg_f)::numeric, 2) AS daily_avg_pm10,
        ROUND(AVG(pm10_max_f)::numeric, 2) AS daily_max_pm10,
        -- VOC
        ROUND(AVG(voc_min_f)::numeric, 2) AS daily_min_voc,
        ROUND(AVG(voc_avg_f)::numeric, 2) AS daily_avg_voc,
        ROUND(AVG(voc_max_f)::numeric, 2) AS daily_max_voc,
        -- CO2
        ROUND(AVG(co2_min_f)::numeric, 2) AS daily_min_co2,
        ROUND(AVG(co2_avg_f)::numeric, 2) AS daily_avg_co2,
        ROUND(AVG(co2_max_f)::numeric, 2) AS daily_max_co2,
        -- CH2O
        ROUND(AVG(ch2o_min_f)::numeric, 2) AS daily_min_ch2o,
        ROUND(AVG(ch2o_avg_f)::numeric, 2) AS daily_avg_ch2o,
        ROUND(AVG(ch2o_max_f)::numeric, 2) AS daily_max_ch2o

    FROM hourly_results
    GROUP BY space_id, event_date
),

final_data as(
    SELECT
        p.space_id,
        p.event_date,
        p.good_aqi_percentage, p.moderate_aqi_percentage, p.unhealthy_sensitive_aqi_percentage, p.unhealthy_aqi_percentage, p.very_unhealthy_aqi_percentage, p.hazardous_aqi_percentage,
        a.daily_avg_aqi,a.daily_max_aqi, a.daily_min_aqi,
        p.good_pm25_percentage, p.moderate_pm25_percentage, p.unhealthy_sensitive_pm25_percentage, p.unhealthy_pm25_percentage, p.very_unhealthy_pm25_percentage, p.hazardous_pm25_percentage,
        a.daily_avg_pm25,a.daily_max_pm25, a.daily_min_pm25,
        p.good_pm10_percentage, p.moderate_pm10_percentage, p.unhealthy_sensitive_pm10_percentage, p.unhealthy_pm10_percentage, p.very_unhealthy_pm10_percentage, p.hazardous_pm10_percentage,
        a.daily_avg_pm10, a.daily_max_pm10, a.daily_min_pm10,
        p.good_voc_percentage, p.moderate_voc_percentage, p.unhealthy_sensitive_voc_percentage, p.unhealthy_voc_percentage, p.very_unhealthy_voc_percentage, p.hazardous_voc_percentage,
        a.daily_avg_voc, a.daily_max_voc, a.daily_min_voc,
        p.good_co2_percentage, p.moderate_co2_percentage, p.unhealthy_sensitive_co2_percentage, p.unhealthy_co2_percentage, p.very_unhealthy_co2_percentage, p.hazardous_co2_percentage,
        a.daily_avg_co2,a.daily_max_co2, a.daily_min_co2,
        p.good_ch2o_percentage, p.moderate_ch2o_percentage, p.unhealthy_sensitive_ch2o_percentage, p.unhealthy_ch2o_percentage, p.very_unhealthy_ch2o_percentage, p.hazardous_ch2o_percentage,
        a.daily_avg_ch2o,a.daily_max_ch2o, a.daily_min_ch2o
    FROM daily_percentages p
    LEFT JOIN daily_averages a
        ON p.space_id = a.space_id
        AND p.event_date = a.event_date
    JOIN params
        ON params.event_date = a.event_date
    ORDER BY p.space_id, p.event_date)


INSERT INTO public."space-daily-pollutant-stats" (
    space_uuid,
    event_date,
    good_aqi_percentage, moderate_aqi_percentage, unhealthy_sensitive_aqi_percentage, unhealthy_aqi_percentage, very_unhealthy_aqi_percentage, hazardous_aqi_percentage,
    daily_avg_aqi, daily_max_aqi, daily_min_aqi,
    good_pm25_percentage, moderate_pm25_percentage, unhealthy_sensitive_pm25_percentage, unhealthy_pm25_percentage, very_unhealthy_pm25_percentage, hazardous_pm25_percentage,
    daily_avg_pm25, daily_max_pm25, daily_min_pm25,
    good_pm10_percentage, moderate_pm10_percentage, unhealthy_sensitive_pm10_percentage, unhealthy_pm10_percentage, very_unhealthy_pm10_percentage, hazardous_pm10_percentage,
    daily_avg_pm10, daily_max_pm10, daily_min_pm10,
    good_voc_percentage, moderate_voc_percentage, unhealthy_sensitive_voc_percentage, unhealthy_voc_percentage, very_unhealthy_voc_percentage, hazardous_voc_percentage,
    daily_avg_voc, daily_max_voc, daily_min_voc,
    good_co2_percentage, moderate_co2_percentage, unhealthy_sensitive_co2_percentage, unhealthy_co2_percentage, very_unhealthy_co2_percentage, hazardous_co2_percentage,
    daily_avg_co2, daily_max_co2, daily_min_co2,
    good_ch2o_percentage, moderate_ch2o_percentage, unhealthy_sensitive_ch2o_percentage, unhealthy_ch2o_percentage, very_unhealthy_ch2o_percentage, hazardous_ch2o_percentage,
    daily_avg_ch2o, daily_max_ch2o, daily_min_ch2o
)
SELECT
    space_id,
    event_date,
    good_aqi_percentage, moderate_aqi_percentage, unhealthy_sensitive_aqi_percentage, unhealthy_aqi_percentage, very_unhealthy_aqi_percentage, hazardous_aqi_percentage,
    daily_avg_aqi, daily_max_aqi, daily_min_aqi,
    good_pm25_percentage, moderate_pm25_percentage, unhealthy_sensitive_pm25_percentage, unhealthy_pm25_percentage, very_unhealthy_pm25_percentage, hazardous_pm25_percentage,
    daily_avg_pm25, daily_max_pm25, daily_min_pm25,
    good_pm10_percentage, moderate_pm10_percentage, unhealthy_sensitive_pm10_percentage, unhealthy_pm10_percentage, very_unhealthy_pm10_percentage, hazardous_pm10_percentage,
    daily_avg_pm10, daily_max_pm10, daily_min_pm10,
    good_voc_percentage, moderate_voc_percentage, unhealthy_sensitive_voc_percentage, unhealthy_voc_percentage, very_unhealthy_voc_percentage, hazardous_voc_percentage,
    daily_avg_voc, daily_max_voc, daily_min_voc,
    good_co2_percentage, moderate_co2_percentage, unhealthy_sensitive_co2_percentage, unhealthy_co2_percentage, very_unhealthy_co2_percentage, hazardous_co2_percentage,
    daily_avg_co2, daily_max_co2, daily_min_co2,
    good_ch2o_percentage, moderate_ch2o_percentage, unhealthy_sensitive_ch2o_percentage, unhealthy_ch2o_percentage, very_unhealthy_ch2o_percentage, hazardous_ch2o_percentage,
    daily_avg_ch2o, daily_max_ch2o, daily_min_ch2o
FROM final_data
ON CONFLICT (space_uuid, event_date) DO UPDATE
SET
    good_aqi_percentage = EXCLUDED.good_aqi_percentage,
    moderate_aqi_percentage = EXCLUDED.moderate_aqi_percentage,
    unhealthy_sensitive_aqi_percentage = EXCLUDED.unhealthy_sensitive_aqi_percentage,
    unhealthy_aqi_percentage = EXCLUDED.unhealthy_aqi_percentage,
    very_unhealthy_aqi_percentage = EXCLUDED.very_unhealthy_aqi_percentage,
    hazardous_aqi_percentage = EXCLUDED.hazardous_aqi_percentage,
    daily_avg_aqi = EXCLUDED.daily_avg_aqi,
    daily_max_aqi = EXCLUDED.daily_max_aqi,
    daily_min_aqi = EXCLUDED.daily_min_aqi,
    good_pm25_percentage = EXCLUDED.good_pm25_percentage,
    moderate_pm25_percentage = EXCLUDED.moderate_pm25_percentage,
    unhealthy_sensitive_pm25_percentage = EXCLUDED.unhealthy_sensitive_pm25_percentage,
    unhealthy_pm25_percentage = EXCLUDED.unhealthy_pm25_percentage,
    very_unhealthy_pm25_percentage = EXCLUDED.very_unhealthy_pm25_percentage,
    hazardous_pm25_percentage = EXCLUDED.hazardous_pm25_percentage,
    daily_avg_pm25 = EXCLUDED.daily_avg_pm25,
    daily_max_pm25 = EXCLUDED.daily_max_pm25,
    daily_min_pm25 = EXCLUDED.daily_min_pm25,
    good_pm10_percentage = EXCLUDED.good_pm10_percentage,
    moderate_pm10_percentage = EXCLUDED.moderate_pm10_percentage,
    unhealthy_sensitive_pm10_percentage = EXCLUDED.unhealthy_sensitive_pm10_percentage,
    unhealthy_pm10_percentage = EXCLUDED.unhealthy_pm10_percentage,
    very_unhealthy_pm10_percentage = EXCLUDED.very_unhealthy_pm10_percentage,
    hazardous_pm10_percentage = EXCLUDED.hazardous_pm10_percentage,
    daily_avg_pm10 = EXCLUDED.daily_avg_pm10,
    daily_max_pm10 = EXCLUDED.daily_max_pm10,
    daily_min_pm10 = EXCLUDED.daily_min_pm10,
    good_voc_percentage = EXCLUDED.good_voc_percentage,
    moderate_voc_percentage = EXCLUDED.moderate_voc_percentage,
    unhealthy_sensitive_voc_percentage = EXCLUDED.unhealthy_sensitive_voc_percentage,
    unhealthy_voc_percentage = EXCLUDED.unhealthy_voc_percentage,
    very_unhealthy_voc_percentage = EXCLUDED.very_unhealthy_voc_percentage,
    hazardous_voc_percentage = EXCLUDED.hazardous_voc_percentage,
    daily_avg_voc = EXCLUDED.daily_avg_voc,
    daily_max_voc = EXCLUDED.daily_max_voc,
    daily_min_voc = EXCLUDED.daily_min_voc,
    good_co2_percentage = EXCLUDED.good_co2_percentage,
    moderate_co2_percentage = EXCLUDED.moderate_co2_percentage,
    unhealthy_sensitive_co2_percentage = EXCLUDED.unhealthy_sensitive_co2_percentage,
    unhealthy_co2_percentage = EXCLUDED.unhealthy_co2_percentage,
    very_unhealthy_co2_percentage = EXCLUDED.very_unhealthy_co2_percentage,
    hazardous_co2_percentage = EXCLUDED.hazardous_co2_percentage,
    daily_avg_co2 = EXCLUDED.daily_avg_co2,
    daily_max_co2 = EXCLUDED.daily_max_co2,
    daily_min_co2 = EXCLUDED.daily_min_co2,
    good_ch2o_percentage = EXCLUDED.good_ch2o_percentage,
    moderate_ch2o_percentage = EXCLUDED.moderate_ch2o_percentage,
    unhealthy_sensitive_ch2o_percentage = EXCLUDED.unhealthy_sensitive_ch2o_percentage,
    unhealthy_ch2o_percentage = EXCLUDED.unhealthy_ch2o_percentage,
    very_unhealthy_ch2o_percentage = EXCLUDED.very_unhealthy_ch2o_percentage,
    hazardous_ch2o_percentage = EXCLUDED.hazardous_ch2o_percentage,
    daily_avg_ch2o = EXCLUDED.daily_avg_ch2o,
    daily_max_ch2o = EXCLUDED.daily_max_ch2o,
    daily_min_ch2o = EXCLUDED.daily_min_ch2o;
@ -0,0 +1,367 @@
-- Query Pipeline Starts Here
WITH device_space AS (
    SELECT
        device.uuid AS device_id,
        device.space_device_uuid AS space_id,
        "device-status-log".event_time::timestamp AS event_time,
        "device-status-log".code,
        "device-status-log".value
    FROM device
    LEFT JOIN "device-status-log"
        ON device.uuid = "device-status-log".device_id
    LEFT JOIN product
        ON product.uuid = device.product_device_uuid
    WHERE product.cat_name = 'hjjcy'
),
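-- Note: because the WHERE clause filters on product.cat_name, the LEFT JOIN to
-- product behaves like an INNER JOIN here; only the join to "device-status-log"
-- can still yield NULL log columns for devices without readings. A commented
-- equivalent sketch of the same CTE body with that join made explicit
-- (illustration only, kept as a comment so the statement above still parses):
--   SELECT d.uuid, d.space_device_uuid, l.event_time::timestamp, l.code, l.value
--   FROM device d
--   JOIN product p ON p.uuid = d.product_device_uuid AND p.cat_name = 'hjjcy'
--   LEFT JOIN "device-status-log" l ON l.device_id = d.uuid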
average_pollutants AS (
    SELECT
        event_time::date AS event_date,
        date_trunc('hour', event_time) AS event_hour,
        space_id,

        -- PM1
        MIN(CASE WHEN code = 'pm1' THEN value::numeric END) AS pm1_min,
        AVG(CASE WHEN code = 'pm1' THEN value::numeric END) AS pm1_avg,
        MAX(CASE WHEN code = 'pm1' THEN value::numeric END) AS pm1_max,

        -- PM25
        MIN(CASE WHEN code = 'pm25_value' THEN value::numeric END) AS pm25_min,
        AVG(CASE WHEN code = 'pm25_value' THEN value::numeric END) AS pm25_avg,
        MAX(CASE WHEN code = 'pm25_value' THEN value::numeric END) AS pm25_max,

        -- PM10
        MIN(CASE WHEN code = 'pm10' THEN value::numeric END) AS pm10_min,
        AVG(CASE WHEN code = 'pm10' THEN value::numeric END) AS pm10_avg,
        MAX(CASE WHEN code = 'pm10' THEN value::numeric END) AS pm10_max,

        -- VOC
        MIN(CASE WHEN code = 'voc_value' THEN value::numeric END) AS voc_min,
        AVG(CASE WHEN code = 'voc_value' THEN value::numeric END) AS voc_avg,
        MAX(CASE WHEN code = 'voc_value' THEN value::numeric END) AS voc_max,

        -- CH2O
        MIN(CASE WHEN code = 'ch2o_value' THEN value::numeric END) AS ch2o_min,
        AVG(CASE WHEN code = 'ch2o_value' THEN value::numeric END) AS ch2o_avg,
        MAX(CASE WHEN code = 'ch2o_value' THEN value::numeric END) AS ch2o_max,

        -- CO2
        MIN(CASE WHEN code = 'co2_value' THEN value::numeric END) AS co2_min,
        AVG(CASE WHEN code = 'co2_value' THEN value::numeric END) AS co2_avg,
        MAX(CASE WHEN code = 'co2_value' THEN value::numeric END) AS co2_max

    FROM device_space
    GROUP BY space_id, event_hour, event_date
),
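-- The MIN/AVG/MAX(CASE WHEN code = ... END) calls above pivot the tall code/value
-- log into one row per space and hour. A minimal standalone sketch of that
-- conditional-aggregation pattern with made-up values (kept as a comment so the
-- statement above still parses):
--   SELECT date_trunc('hour', ts) AS event_hour,
--          AVG(CASE WHEN code = 'pm25_value' THEN value END) AS pm25_avg,
--          AVG(CASE WHEN code = 'co2_value'  THEN value END) AS co2_avg
--   FROM (VALUES ('pm25_value', 12.0, now()), ('co2_value', 600.0, now())) AS v(code, value, ts)
--   GROUP BY 1;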
filled_pollutants AS (
    SELECT
        *,
        -- AVG
        COALESCE(pm25_avg, LAG(pm25_avg) OVER (PARTITION BY space_id ORDER BY event_hour)) AS pm25_avg_f,
        COALESCE(pm10_avg, LAG(pm10_avg) OVER (PARTITION BY space_id ORDER BY event_hour)) AS pm10_avg_f,
        COALESCE(voc_avg, LAG(voc_avg) OVER (PARTITION BY space_id ORDER BY event_hour)) AS voc_avg_f,
        COALESCE(co2_avg, LAG(co2_avg) OVER (PARTITION BY space_id ORDER BY event_hour)) AS co2_avg_f,
        COALESCE(ch2o_avg, LAG(ch2o_avg) OVER (PARTITION BY space_id ORDER BY event_hour)) AS ch2o_avg_f,

        -- MIN
        COALESCE(pm25_min, LAG(pm25_min) OVER (PARTITION BY space_id ORDER BY event_hour)) AS pm25_min_f,
        COALESCE(pm10_min, LAG(pm10_min) OVER (PARTITION BY space_id ORDER BY event_hour)) AS pm10_min_f,
        COALESCE(voc_min, LAG(voc_min) OVER (PARTITION BY space_id ORDER BY event_hour)) AS voc_min_f,
        COALESCE(co2_min, LAG(co2_min) OVER (PARTITION BY space_id ORDER BY event_hour)) AS co2_min_f,
        COALESCE(ch2o_min, LAG(ch2o_min) OVER (PARTITION BY space_id ORDER BY event_hour)) AS ch2o_min_f,

        -- MAX
        COALESCE(pm25_max, LAG(pm25_max) OVER (PARTITION BY space_id ORDER BY event_hour)) AS pm25_max_f,
        COALESCE(pm10_max, LAG(pm10_max) OVER (PARTITION BY space_id ORDER BY event_hour)) AS pm10_max_f,
        COALESCE(voc_max, LAG(voc_max) OVER (PARTITION BY space_id ORDER BY event_hour)) AS voc_max_f,
        COALESCE(co2_max, LAG(co2_max) OVER (PARTITION BY space_id ORDER BY event_hour)) AS co2_max_f,
        COALESCE(ch2o_max, LAG(ch2o_max) OVER (PARTITION BY space_id ORDER BY event_hour)) AS ch2o_max_f
    FROM average_pollutants
),
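-- Note: COALESCE(x, LAG(x) OVER ...) looks back exactly one hourly row, so it
-- backfills a single missing hour; with two or more consecutive NULL hours only
-- the first gets filled. Commented illustration on a made-up series:
--   SELECT v, COALESCE(v, LAG(v) OVER (ORDER BY i)) AS v_filled
--   FROM (VALUES (1, 10.0), (2, NULL), (3, NULL)) AS s(i, v)
--   ORDER BY i;   -- raw 10, NULL, NULL  ->  filled 10, 10, NULL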
hourly_results AS (
    SELECT
        space_id,
        event_date,
        event_hour,
        pm1_min, pm1_avg, pm1_max,
        pm25_min_f, pm25_avg_f, pm25_max_f,
        pm10_min_f, pm10_avg_f, pm10_max_f,
        voc_min_f, voc_avg_f, voc_max_f,
        co2_min_f, co2_avg_f, co2_max_f,
        ch2o_min_f, ch2o_avg_f, ch2o_max_f,

        GREATEST(
            calculate_aqi('pm25', pm25_min_f),
            calculate_aqi('pm10', pm10_min_f)
        ) AS hourly_min_aqi,

        GREATEST(
            calculate_aqi('pm25', pm25_avg_f),
            calculate_aqi('pm10', pm10_avg_f)
        ) AS hourly_avg_aqi,

        GREATEST(
            calculate_aqi('pm25', pm25_max_f),
            calculate_aqi('pm10', pm10_max_f)
        ) AS hourly_max_aqi,

        classify_aqi(GREATEST(
            calculate_aqi('pm25', pm25_avg_f),
            calculate_aqi('pm10', pm10_avg_f)
        )) AS aqi_category,

        classify_aqi(calculate_aqi('pm25',pm25_avg_f)) as pm25_category,
        classify_aqi(calculate_aqi('pm10',pm10_avg_f)) as pm10_category,
        classify_aqi(calculate_aqi('voc',voc_avg_f)) as voc_category,
        classify_aqi(calculate_aqi('co2',co2_avg_f)) as co2_category,
        classify_aqi(calculate_aqi('ch2o',ch2o_avg_f)) as ch2o_category

    FROM filled_pollutants
),
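-- The hourly AQI above is the worst (largest) of the per-pollutant sub-indices,
-- here limited to the PM2.5 and PM10 sub-indices returned by calculate_aqi.
-- Commented illustration with made-up sub-index values:
--   SELECT GREATEST(62, 41) AS hourly_avg_aqi;   -- -> 62, the PM2.5 sub-index dominates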
daily_category_counts AS (
    SELECT space_id, event_date, aqi_category AS category, 'aqi' AS pollutant, COUNT(*) AS category_count
    FROM hourly_results
    GROUP BY space_id, event_date, aqi_category

    UNION ALL

    SELECT space_id, event_date, pm25_category AS category, 'pm25' AS pollutant, COUNT(*) AS category_count
    FROM hourly_results
    GROUP BY space_id, event_date, pm25_category

    UNION ALL

    SELECT space_id, event_date, pm10_category AS category, 'pm10' AS pollutant, COUNT(*) AS category_count
    FROM hourly_results
    GROUP BY space_id, event_date, pm10_category

    UNION ALL

    SELECT space_id, event_date, voc_category AS category, 'voc' AS pollutant, COUNT(*) AS category_count
    FROM hourly_results
    GROUP BY space_id, event_date, voc_category

    UNION ALL

    SELECT space_id, event_date, co2_category AS category, 'co2' AS pollutant, COUNT(*) AS category_count
    FROM hourly_results
    GROUP BY space_id, event_date, co2_category

    UNION ALL

    SELECT space_id, event_date, ch2o_category AS category, 'ch2o' AS pollutant, COUNT(*) AS category_count
    FROM hourly_results
    GROUP BY space_id, event_date, ch2o_category
),
daily_totals AS (
    SELECT
        space_id,
        event_date,
        SUM(category_count) AS total_count
    FROM daily_category_counts
    where pollutant = 'aqi'
    GROUP BY space_id, event_date
),
-- Pivot Categories into Columns
daily_percentages AS (
    select
        dt.space_id,
        dt.event_date,
        -- AQI CATEGORIES
        ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Good' and dcc.pollutant = 'aqi' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS good_aqi_percentage,
        ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Moderate' and dcc.pollutant = 'aqi' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS moderate_aqi_percentage,
        ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Unhealthy for Sensitive Groups' and dcc.pollutant = 'aqi' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS unhealthy_sensitive_aqi_percentage,
        ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Unhealthy' and dcc.pollutant = 'aqi' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS unhealthy_aqi_percentage,
        ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Very Unhealthy' and dcc.pollutant = 'aqi' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS very_unhealthy_aqi_percentage,
        ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Hazardous' and dcc.pollutant = 'aqi' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS hazardous_aqi_percentage,
        -- PM25 CATEGORIES
        ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Good' and dcc.pollutant = 'pm25' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS good_pm25_percentage,
        ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Moderate' and dcc.pollutant = 'pm25' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS moderate_pm25_percentage,
        ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Unhealthy for Sensitive Groups' and dcc.pollutant = 'pm25' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS unhealthy_sensitive_pm25_percentage,
        ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Unhealthy' and dcc.pollutant = 'pm25' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS unhealthy_pm25_percentage,
        ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Very Unhealthy' and dcc.pollutant = 'pm25' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS very_unhealthy_pm25_percentage,
        ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Hazardous' and dcc.pollutant = 'pm25' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS hazardous_pm25_percentage,
        -- PM10 CATEGORIES
        ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Good' and dcc.pollutant = 'pm10' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS good_pm10_percentage,
        ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Moderate' and dcc.pollutant = 'pm10' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS moderate_pm10_percentage,
        ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Unhealthy for Sensitive Groups' and dcc.pollutant = 'pm10' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS unhealthy_sensitive_pm10_percentage,
        ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Unhealthy' and dcc.pollutant = 'pm10' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS unhealthy_pm10_percentage,
        ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Very Unhealthy' and dcc.pollutant = 'pm10' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS very_unhealthy_pm10_percentage,
        ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Hazardous' and dcc.pollutant = 'pm10' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS hazardous_pm10_percentage,
        -- VOC CATEGORIES
        ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Good' and dcc.pollutant = 'voc' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS good_voc_percentage,
        ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Moderate' and dcc.pollutant = 'voc' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS moderate_voc_percentage,
        ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Unhealthy for Sensitive Groups' and dcc.pollutant = 'voc' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS unhealthy_sensitive_voc_percentage,
        ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Unhealthy' and dcc.pollutant = 'voc' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS unhealthy_voc_percentage,
        ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Very Unhealthy' and dcc.pollutant = 'voc' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS very_unhealthy_voc_percentage,
        ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Hazardous' and dcc.pollutant = 'voc' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS hazardous_voc_percentage,
        -- CO2 CATEGORIES
        ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Good' and dcc.pollutant = 'co2' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS good_co2_percentage,
        ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Moderate' and dcc.pollutant = 'co2' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS moderate_co2_percentage,
        ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Unhealthy for Sensitive Groups' and dcc.pollutant = 'co2' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS unhealthy_sensitive_co2_percentage,
        ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Unhealthy' and dcc.pollutant = 'co2' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS unhealthy_co2_percentage,
        ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Very Unhealthy' and dcc.pollutant = 'co2' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS very_unhealthy_co2_percentage,
        ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Hazardous' and dcc.pollutant = 'co2' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS hazardous_co2_percentage,
        -- CH2O CATEGORIES
        ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Good' and dcc.pollutant = 'ch2o' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS good_ch2o_percentage,
        ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Moderate' and dcc.pollutant = 'ch2o' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS moderate_ch2o_percentage,
        ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Unhealthy for Sensitive Groups' and dcc.pollutant = 'ch2o' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS unhealthy_sensitive_ch2o_percentage,
        ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Unhealthy' and dcc.pollutant = 'ch2o' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS unhealthy_ch2o_percentage,
        ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Very Unhealthy' and dcc.pollutant = 'ch2o' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS very_unhealthy_ch2o_percentage,
        ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Hazardous' and dcc.pollutant = 'ch2o' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS hazardous_ch2o_percentage
    FROM daily_totals dt
    LEFT JOIN daily_category_counts dcc
        ON dt.space_id = dcc.space_id AND dt.event_date = dcc.event_date
    GROUP BY dt.space_id, dt.event_date, dt.total_count
),
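-- Each percentage above is: hours in that category for that pollutant, divided by
-- the day's total hours with AQI data (dt.total_count), times 100, rounded to two
-- decimals. Commented worked example with made-up counts, 18 'Good' hours out of 24:
--   SELECT ROUND(18 * 100.0 / 24, 2);   -- -> 75.00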
daily_averages AS (
    SELECT
        space_id,
        event_date,
        -- AQI
        ROUND(AVG(hourly_min_aqi)::numeric, 2) AS daily_min_aqi,
        ROUND(AVG(hourly_avg_aqi)::numeric, 2) AS daily_avg_aqi,
        ROUND(AVG(hourly_max_aqi)::numeric, 2) AS daily_max_aqi,
        -- PM25
        ROUND(AVG(pm25_min_f)::numeric, 2) AS daily_min_pm25,
        ROUND(AVG(pm25_avg_f)::numeric, 2) AS daily_avg_pm25,
        ROUND(AVG(pm25_max_f)::numeric, 2) AS daily_max_pm25,
        -- PM10
        ROUND(AVG(pm10_min_f)::numeric, 2) AS daily_min_pm10,
        ROUND(AVG(pm10_avg_f)::numeric, 2) AS daily_avg_pm10,
        ROUND(AVG(pm10_max_f)::numeric, 2) AS daily_max_pm10,
        -- VOC
        ROUND(AVG(voc_min_f)::numeric, 2) AS daily_min_voc,
        ROUND(AVG(voc_avg_f)::numeric, 2) AS daily_avg_voc,
        ROUND(AVG(voc_max_f)::numeric, 2) AS daily_max_voc,
        -- CO2
        ROUND(AVG(co2_min_f)::numeric, 2) AS daily_min_co2,
        ROUND(AVG(co2_avg_f)::numeric, 2) AS daily_avg_co2,
        ROUND(AVG(co2_max_f)::numeric, 2) AS daily_max_co2,
        -- CH2O
        ROUND(AVG(ch2o_min_f)::numeric, 2) AS daily_min_ch2o,
        ROUND(AVG(ch2o_avg_f)::numeric, 2) AS daily_avg_ch2o,
        ROUND(AVG(ch2o_max_f)::numeric, 2) AS daily_max_ch2o
    FROM hourly_results
    GROUP BY space_id, event_date
),
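-- Note: daily_min_*/daily_max_* above average the hourly minima and maxima, so they
-- are not the absolute extremes of the day. Commented illustration with made-up
-- hourly maxima 40, 80 and 60:
--   SELECT ROUND(AVG(x)::numeric, 2) AS daily_max_style, MAX(x) AS true_daily_max
--   FROM (VALUES (40), (80), (60)) AS t(x);   -- -> 60.00 vs 80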
final_data AS (
    SELECT
        p.space_id,
        p.event_date,
        p.good_aqi_percentage, p.moderate_aqi_percentage, p.unhealthy_sensitive_aqi_percentage, p.unhealthy_aqi_percentage, p.very_unhealthy_aqi_percentage, p.hazardous_aqi_percentage,
        a.daily_avg_aqi, a.daily_max_aqi, a.daily_min_aqi,
        p.good_pm25_percentage, p.moderate_pm25_percentage, p.unhealthy_sensitive_pm25_percentage, p.unhealthy_pm25_percentage, p.very_unhealthy_pm25_percentage, p.hazardous_pm25_percentage,
        a.daily_avg_pm25, a.daily_max_pm25, a.daily_min_pm25,
        p.good_pm10_percentage, p.moderate_pm10_percentage, p.unhealthy_sensitive_pm10_percentage, p.unhealthy_pm10_percentage, p.very_unhealthy_pm10_percentage, p.hazardous_pm10_percentage,
        a.daily_avg_pm10, a.daily_max_pm10, a.daily_min_pm10,
        p.good_voc_percentage, p.moderate_voc_percentage, p.unhealthy_sensitive_voc_percentage, p.unhealthy_voc_percentage, p.very_unhealthy_voc_percentage, p.hazardous_voc_percentage,
        a.daily_avg_voc, a.daily_max_voc, a.daily_min_voc,
        p.good_co2_percentage, p.moderate_co2_percentage, p.unhealthy_sensitive_co2_percentage, p.unhealthy_co2_percentage, p.very_unhealthy_co2_percentage, p.hazardous_co2_percentage,
        a.daily_avg_co2, a.daily_max_co2, a.daily_min_co2,
        p.good_ch2o_percentage, p.moderate_ch2o_percentage, p.unhealthy_sensitive_ch2o_percentage, p.unhealthy_ch2o_percentage, p.very_unhealthy_ch2o_percentage, p.hazardous_ch2o_percentage,
        a.daily_avg_ch2o, a.daily_max_ch2o, a.daily_min_ch2o
    FROM daily_percentages p
    LEFT JOIN daily_averages a
        ON p.space_id = a.space_id AND p.event_date = a.event_date
    ORDER BY p.space_id, p.event_date
)

INSERT INTO public."space-daily-pollutant-stats" (
  space_uuid,
  event_date,
  good_aqi_percentage, moderate_aqi_percentage, unhealthy_sensitive_aqi_percentage, unhealthy_aqi_percentage, very_unhealthy_aqi_percentage, hazardous_aqi_percentage,
  daily_avg_aqi, daily_max_aqi, daily_min_aqi,
  good_pm25_percentage, moderate_pm25_percentage, unhealthy_sensitive_pm25_percentage, unhealthy_pm25_percentage, very_unhealthy_pm25_percentage, hazardous_pm25_percentage,
  daily_avg_pm25, daily_max_pm25, daily_min_pm25,
  good_pm10_percentage, moderate_pm10_percentage, unhealthy_sensitive_pm10_percentage, unhealthy_pm10_percentage, very_unhealthy_pm10_percentage, hazardous_pm10_percentage,
  daily_avg_pm10, daily_max_pm10, daily_min_pm10,
  good_voc_percentage, moderate_voc_percentage, unhealthy_sensitive_voc_percentage, unhealthy_voc_percentage, very_unhealthy_voc_percentage, hazardous_voc_percentage,
  daily_avg_voc, daily_max_voc, daily_min_voc,
  good_co2_percentage, moderate_co2_percentage, unhealthy_sensitive_co2_percentage, unhealthy_co2_percentage, very_unhealthy_co2_percentage, hazardous_co2_percentage,
  daily_avg_co2, daily_max_co2, daily_min_co2,
  good_ch2o_percentage, moderate_ch2o_percentage, unhealthy_sensitive_ch2o_percentage, unhealthy_ch2o_percentage, very_unhealthy_ch2o_percentage, hazardous_ch2o_percentage,
  daily_avg_ch2o, daily_max_ch2o, daily_min_ch2o
)
SELECT
  space_id,
  event_date,
  good_aqi_percentage, moderate_aqi_percentage, unhealthy_sensitive_aqi_percentage, unhealthy_aqi_percentage, very_unhealthy_aqi_percentage, hazardous_aqi_percentage,
  daily_avg_aqi, daily_max_aqi, daily_min_aqi,
  good_pm25_percentage, moderate_pm25_percentage, unhealthy_sensitive_pm25_percentage, unhealthy_pm25_percentage, very_unhealthy_pm25_percentage, hazardous_pm25_percentage,
  daily_avg_pm25, daily_max_pm25, daily_min_pm25,
  good_pm10_percentage, moderate_pm10_percentage, unhealthy_sensitive_pm10_percentage, unhealthy_pm10_percentage, very_unhealthy_pm10_percentage, hazardous_pm10_percentage,
  daily_avg_pm10, daily_max_pm10, daily_min_pm10,
  good_voc_percentage, moderate_voc_percentage, unhealthy_sensitive_voc_percentage, unhealthy_voc_percentage, very_unhealthy_voc_percentage, hazardous_voc_percentage,
  daily_avg_voc, daily_max_voc, daily_min_voc,
  good_co2_percentage, moderate_co2_percentage, unhealthy_sensitive_co2_percentage, unhealthy_co2_percentage, very_unhealthy_co2_percentage, hazardous_co2_percentage,
  daily_avg_co2, daily_max_co2, daily_min_co2,
  good_ch2o_percentage, moderate_ch2o_percentage, unhealthy_sensitive_ch2o_percentage, unhealthy_ch2o_percentage, very_unhealthy_ch2o_percentage, hazardous_ch2o_percentage,
  daily_avg_ch2o, daily_max_ch2o, daily_min_ch2o
FROM final_data
ON CONFLICT (space_uuid, event_date) DO UPDATE
SET
  good_aqi_percentage = EXCLUDED.good_aqi_percentage,
  moderate_aqi_percentage = EXCLUDED.moderate_aqi_percentage,
  unhealthy_sensitive_aqi_percentage = EXCLUDED.unhealthy_sensitive_aqi_percentage,
  unhealthy_aqi_percentage = EXCLUDED.unhealthy_aqi_percentage,
  very_unhealthy_aqi_percentage = EXCLUDED.very_unhealthy_aqi_percentage,
  hazardous_aqi_percentage = EXCLUDED.hazardous_aqi_percentage,
  daily_avg_aqi = EXCLUDED.daily_avg_aqi,
  daily_max_aqi = EXCLUDED.daily_max_aqi,
  daily_min_aqi = EXCLUDED.daily_min_aqi,
  good_pm25_percentage = EXCLUDED.good_pm25_percentage,
  moderate_pm25_percentage = EXCLUDED.moderate_pm25_percentage,
  unhealthy_sensitive_pm25_percentage = EXCLUDED.unhealthy_sensitive_pm25_percentage,
  unhealthy_pm25_percentage = EXCLUDED.unhealthy_pm25_percentage,
  very_unhealthy_pm25_percentage = EXCLUDED.very_unhealthy_pm25_percentage,
  hazardous_pm25_percentage = EXCLUDED.hazardous_pm25_percentage,
  daily_avg_pm25 = EXCLUDED.daily_avg_pm25,
  daily_max_pm25 = EXCLUDED.daily_max_pm25,
  daily_min_pm25 = EXCLUDED.daily_min_pm25,
  good_pm10_percentage = EXCLUDED.good_pm10_percentage,
  moderate_pm10_percentage = EXCLUDED.moderate_pm10_percentage,
  unhealthy_sensitive_pm10_percentage = EXCLUDED.unhealthy_sensitive_pm10_percentage,
  unhealthy_pm10_percentage = EXCLUDED.unhealthy_pm10_percentage,
  very_unhealthy_pm10_percentage = EXCLUDED.very_unhealthy_pm10_percentage,
  hazardous_pm10_percentage = EXCLUDED.hazardous_pm10_percentage,
  daily_avg_pm10 = EXCLUDED.daily_avg_pm10,
  daily_max_pm10 = EXCLUDED.daily_max_pm10,
  daily_min_pm10 = EXCLUDED.daily_min_pm10,
  good_voc_percentage = EXCLUDED.good_voc_percentage,
  moderate_voc_percentage = EXCLUDED.moderate_voc_percentage,
  unhealthy_sensitive_voc_percentage = EXCLUDED.unhealthy_sensitive_voc_percentage,
  unhealthy_voc_percentage = EXCLUDED.unhealthy_voc_percentage,
  very_unhealthy_voc_percentage = EXCLUDED.very_unhealthy_voc_percentage,
  hazardous_voc_percentage = EXCLUDED.hazardous_voc_percentage,
  daily_avg_voc = EXCLUDED.daily_avg_voc,
  daily_max_voc = EXCLUDED.daily_max_voc,
  daily_min_voc = EXCLUDED.daily_min_voc,
  good_co2_percentage = EXCLUDED.good_co2_percentage,
  moderate_co2_percentage = EXCLUDED.moderate_co2_percentage,
  unhealthy_sensitive_co2_percentage = EXCLUDED.unhealthy_sensitive_co2_percentage,
  unhealthy_co2_percentage = EXCLUDED.unhealthy_co2_percentage,
  very_unhealthy_co2_percentage = EXCLUDED.very_unhealthy_co2_percentage,
  hazardous_co2_percentage = EXCLUDED.hazardous_co2_percentage,
  daily_avg_co2 = EXCLUDED.daily_avg_co2,
  daily_max_co2 = EXCLUDED.daily_max_co2,
  daily_min_co2 = EXCLUDED.daily_min_co2,
  good_ch2o_percentage = EXCLUDED.good_ch2o_percentage,
  moderate_ch2o_percentage = EXCLUDED.moderate_ch2o_percentage,
  unhealthy_sensitive_ch2o_percentage = EXCLUDED.unhealthy_sensitive_ch2o_percentage,
  unhealthy_ch2o_percentage = EXCLUDED.unhealthy_ch2o_percentage,
  very_unhealthy_ch2o_percentage = EXCLUDED.very_unhealthy_ch2o_percentage,
  hazardous_ch2o_percentage = EXCLUDED.hazardous_ch2o_percentage,
  daily_avg_ch2o = EXCLUDED.daily_avg_ch2o,
  daily_max_ch2o = EXCLUDED.daily_max_ch2o,
  daily_min_ch2o = EXCLUDED.daily_min_ch2o;
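-- The ON CONFLICT (space_uuid, event_date) clause above relies on a unique index or
-- constraint covering exactly those two columns of "space-daily-pollutant-stats".
-- Hypothetical DDL sketch (constraint name assumed) that would satisfy it:
--   ALTER TABLE public."space-daily-pollutant-stats"
--     ADD CONSTRAINT "space-daily-pollutant-stats_space_uuid_event_date_key"
--       UNIQUE (space_uuid, event_date);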
@ -1,100 +1,94 @@
|
|||||||
-- Step 1: Get device presence events with previous timestamps
|
WITH presence_logs AS (
|
||||||
WITH start_date AS (
|
SELECT
|
||||||
SELECT
|
d.space_device_uuid AS space_id,
|
||||||
d.uuid AS device_id,
|
l.device_id,
|
||||||
d.space_device_uuid AS space_id,
|
l.event_time,
|
||||||
l.value,
|
l.value,
|
||||||
l.event_time::timestamp AS event_time,
|
LAG(l.event_time) OVER (PARTITION BY l.device_id ORDER BY l.event_time) AS prev_time,
|
||||||
LAG(l.event_time::timestamp) OVER (PARTITION BY d.uuid ORDER BY l.event_time) AS prev_timestamp
|
LAG(l.value) OVER (PARTITION BY l.device_id ORDER BY l.event_time) AS prev_value
|
||||||
FROM device d
|
FROM device d
|
||||||
LEFT JOIN "device-status-log" l
|
JOIN "device-status-log" l ON d.uuid = l.device_id
|
||||||
ON d.uuid = l.device_id
|
JOIN product p ON p.uuid = d.product_device_uuid
|
||||||
LEFT JOIN product p
|
WHERE l.code = 'presence_state'
|
||||||
ON p.uuid = d.product_device_uuid
|
AND p.cat_name = 'hps'
|
||||||
WHERE p.cat_name = 'hps'
|
|
||||||
AND l.code = 'presence_state'
|
|
||||||
),
|
),
|
||||||
|
|
||||||
-- Step 2: Identify periods when device reports "none"
|
-- Intervals when device was in 'presence' (between prev_time and event_time when value='none')
|
||||||
device_none_periods AS (
|
presence_intervals AS (
|
||||||
SELECT
|
SELECT
|
||||||
space_id,
|
space_id,
|
||||||
device_id,
|
prev_time AS start_time,
|
||||||
event_time AS empty_from,
|
event_time AS end_time
|
||||||
LEAD(event_time) OVER (PARTITION BY device_id ORDER BY event_time) AS empty_until
|
FROM presence_logs
|
||||||
FROM start_date
|
WHERE value = 'none'
|
||||||
WHERE value = 'none'
|
AND prev_value = 'presence'
|
||||||
|
AND prev_time IS NOT NULL
|
||||||
),
|
),
|
||||||
|
|
||||||
-- Step 3: Clip the "none" periods to the edges of each day
|
-- Split intervals across days
|
||||||
clipped_device_none_periods AS (
|
split_intervals AS (
|
||||||
SELECT
|
SELECT
|
||||||
space_id,
|
space_id,
|
||||||
GREATEST(empty_from, DATE_TRUNC('day', empty_from)) AS clipped_from,
|
generate_series(
|
||||||
LEAST(empty_until, DATE_TRUNC('day', empty_until) + INTERVAL '1 day') AS clipped_until
|
date_trunc('day', start_time),
|
||||||
FROM device_none_periods
|
date_trunc('day', end_time),
|
||||||
WHERE empty_until IS NOT NULL
|
interval '1 day'
|
||||||
|
)::date AS event_date,
|
||||||
|
GREATEST(start_time, date_trunc('day', start_time)) AS interval_start,
|
||||||
|
LEAST(end_time, date_trunc('day', end_time) + interval '1 day') AS interval_end
|
||||||
|
FROM presence_intervals
|
||||||
),
|
),
|
||||||
|
|
||||||
-- Step 4: Break multi-day periods into daily intervals
|
-- Mark and group overlapping intervals per space per day
|
||||||
generated_daily_intervals AS (
|
ordered_intervals AS (
|
||||||
SELECT
|
SELECT
|
||||||
space_id,
|
space_id,
|
||||||
gs::date AS day,
|
event_date,
|
||||||
GREATEST(clipped_from, gs) AS interval_start,
|
interval_start,
|
||||||
LEAST(clipped_until, gs + INTERVAL '1 day') AS interval_end
|
interval_end,
|
||||||
FROM clipped_device_none_periods,
|
LAG(interval_end) OVER (PARTITION BY space_id, event_date ORDER BY interval_start) AS prev_end
|
||||||
LATERAL generate_series(DATE_TRUNC('day', clipped_from), DATE_TRUNC('day', clipped_until), INTERVAL '1 day') AS gs
|
FROM split_intervals
|
||||||
),
|
),
|
||||||
|
|
||||||
-- Step 5: Merge overlapping or adjacent intervals per day
|
grouped_intervals AS (
|
||||||
|
SELECT *,
|
||||||
|
SUM(CASE
|
||||||
|
WHEN prev_end IS NULL OR interval_start > prev_end THEN 1
|
||||||
|
ELSE 0
|
||||||
|
END) OVER (PARTITION BY space_id, event_date ORDER BY interval_start) AS grp
|
||||||
|
FROM ordered_intervals
|
||||||
|
),
|
||||||
|
|
||||||
|
-- Merge overlapping intervals per group
|
||||||
merged_intervals AS (
|
merged_intervals AS (
|
||||||
SELECT
|
SELECT
|
||||||
space_id,
|
space_id,
|
||||||
day,
|
event_date,
|
||||||
interval_start,
|
MIN(interval_start) AS merged_start,
|
||||||
interval_end
|
MAX(interval_end) AS merged_end
|
||||||
FROM (
|
FROM grouped_intervals
|
||||||
SELECT
|
GROUP BY space_id, event_date, grp
|
||||||
space_id,
|
|
||||||
day,
|
|
||||||
interval_start,
|
|
||||||
interval_end,
|
|
||||||
LAG(interval_end) OVER (PARTITION BY space_id, day ORDER BY interval_start) AS prev_end
|
|
||||||
FROM generated_daily_intervals
|
|
||||||
) sub
|
|
||||||
WHERE prev_end IS NULL OR interval_start > prev_end
|
|
||||||
),
|
),
|
||||||
|
|
||||||
-- Step 6: Sum up total missing seconds (device reported "none") per day
|
-- Sum durations of merged intervals
|
||||||
missing_seconds_per_day AS (
|
summed_intervals AS (
|
||||||
SELECT
|
SELECT
|
||||||
space_id,
|
space_id,
|
||||||
day AS missing_date,
|
event_date,
|
||||||
SUM(EXTRACT(EPOCH FROM (interval_end - interval_start))) AS total_missing_seconds
|
SUM(EXTRACT(EPOCH FROM (merged_end - merged_start))) AS raw_occupied_seconds
|
||||||
FROM merged_intervals
|
FROM merged_intervals
|
||||||
GROUP BY space_id, day
|
GROUP BY space_id, event_date
|
||||||
),
|
),
|
||||||
|
|
||||||
-- Step 7: Calculate total occupied time per day (86400 - missing)
|
final_data AS (
|
||||||
occupied_seconds_per_day AS (
|
SELECT
|
||||||
SELECT
|
space_id,
|
||||||
space_id,
|
event_date,
|
||||||
missing_date as event_date,
|
LEAST(raw_occupied_seconds, 86400) AS occupied_seconds,
|
||||||
86400 - total_missing_seconds AS total_occupied_seconds,
|
ROUND(LEAST(raw_occupied_seconds, 86400) / 86400.0 * 100, 2) AS occupancy_percentage
|
||||||
(86400 - total_missing_seconds)/86400*100 as occupancy_prct
|
FROM summed_intervals
|
||||||
FROM missing_seconds_per_day
|
ORDER BY space_id, event_date)
|
||||||
)
|
|
||||||
|
|
||||||
-- Final Output
|
|
||||||
, final_data as (
|
|
||||||
SELECT space_id,
|
|
||||||
event_date,
|
|
||||||
total_occupied_seconds,
|
|
||||||
occupancy_prct
|
|
||||||
FROM occupied_seconds_per_day
|
|
||||||
ORDER BY 1,2
|
|
||||||
)
|
|
||||||
|
|
||||||
INSERT INTO public."space-daily-occupancy-duration" (
|
INSERT INTO public."space-daily-occupancy-duration" (
|
||||||
space_uuid,
|
space_uuid,
|
||||||
@ -104,12 +98,13 @@ INSERT INTO public."space-daily-occupancy-duration" (
|
|||||||
)
|
)
|
||||||
select space_id,
|
select space_id,
|
||||||
event_date,
|
event_date,
|
||||||
total_occupied_seconds,
|
occupied_seconds,
|
||||||
occupancy_prct
|
occupancy_percentage
|
||||||
FROM final_data
|
FROM final_data
|
||||||
ON CONFLICT (space_uuid, event_date) DO UPDATE
|
ON CONFLICT (space_uuid, event_date) DO UPDATE
|
||||||
SET
|
SET
|
||||||
occupancy_percentage = EXCLUDED.occupancy_percentage;
|
occupancy_percentage = EXCLUDED.occupancy_percentage,
|
||||||
|
occupied_seconds = EXCLUDED.occupied_seconds;
|
||||||
|
|
||||||
|
|
||||||
|
|
@ -1,117 +1,107 @@
|
|||||||
WITH params AS (
|
WITH params AS (
|
||||||
SELECT
|
SELECT
|
||||||
TO_DATE(NULLIF($1, ''), 'YYYY-MM-DD') AS event_date,
|
TO_DATE(NULLIF($1, ''), 'YYYY-MM-DD') AS event_date
|
||||||
$2::uuid AS space_id
|
|
||||||
)
|
|
||||||
|
|
||||||
, start_date AS (
|
|
||||||
SELECT
|
|
||||||
d.uuid AS device_id,
|
|
||||||
d.space_device_uuid AS space_id,
|
|
||||||
l.value,
|
|
||||||
l.event_time::timestamp AS event_time,
|
|
||||||
LAG(l.event_time::timestamp) OVER (PARTITION BY d.uuid ORDER BY l.event_time) AS prev_timestamp
|
|
||||||
FROM device d
|
|
||||||
LEFT JOIN "device-status-log" l
|
|
||||||
ON d.uuid = l.device_id
|
|
||||||
LEFT JOIN product p
|
|
||||||
ON p.uuid = d.product_device_uuid
|
|
||||||
WHERE p.cat_name = 'hps'
|
|
||||||
AND l.code = 'presence_state'
|
|
||||||
),
|
),
|
||||||
|
|
||||||
-- Step 2: Identify periods when device reports "none"
|
presence_logs AS (
|
||||||
device_none_periods AS (
|
SELECT
|
||||||
SELECT
|
d.space_device_uuid AS space_id,
|
||||||
space_id,
|
l.device_id,
|
||||||
device_id,
|
l.event_time,
|
||||||
event_time AS empty_from,
|
l.value,
|
||||||
LEAD(event_time) OVER (PARTITION BY device_id ORDER BY event_time) AS empty_until
|
LAG(l.event_time) OVER (PARTITION BY l.device_id ORDER BY l.event_time) AS prev_time
|
||||||
FROM start_date
|
FROM device d
|
||||||
WHERE value = 'none'
|
JOIN "device-status-log" l ON d.uuid = l.device_id
|
||||||
|
JOIN product p ON p.uuid = d.product_device_uuid
|
||||||
|
WHERE l.code = 'presence_state'
|
||||||
|
AND p.cat_name = 'hps'
|
||||||
),
|
),
|
||||||
|
|
||||||
-- Step 3: Clip the "none" periods to the edges of each day
|
presence_intervals AS (
|
||||||
clipped_device_none_periods AS (
|
SELECT
|
||||||
SELECT
|
space_id,
|
||||||
space_id,
|
prev_time AS start_time,
|
||||||
GREATEST(empty_from, DATE_TRUNC('day', empty_from)) AS clipped_from,
|
event_time AS end_time
|
||||||
LEAST(empty_until, DATE_TRUNC('day', empty_until) + INTERVAL '1 day') AS clipped_until
|
FROM presence_logs
|
||||||
FROM device_none_periods
|
WHERE value = 'none' AND prev_time IS NOT NULL
|
||||||
WHERE empty_until IS NOT NULL
|
|
||||||
),
|
),
|
||||||
|
|
||||||
-- Step 4: Break multi-day periods into daily intervals
|
split_intervals AS (
|
||||||
generated_daily_intervals AS (
|
SELECT
|
||||||
SELECT
|
space_id,
|
||||||
space_id,
|
generate_series(
|
||||||
gs::date AS day,
|
date_trunc('day', start_time),
|
||||||
GREATEST(clipped_from, gs) AS interval_start,
|
date_trunc('day', end_time),
|
||||||
LEAST(clipped_until, gs + INTERVAL '1 day') AS interval_end
|
interval '1 day'
|
||||||
FROM clipped_device_none_periods,
|
)::date AS event_date,
|
||||||
LATERAL generate_series(DATE_TRUNC('day', clipped_from), DATE_TRUNC('day', clipped_until), INTERVAL '1 day') AS gs
|
GREATEST(start_time, date_trunc('day', start_time)) AS interval_start,
|
||||||
|
LEAST(end_time, date_trunc('day', end_time) + INTERVAL '1 day') AS interval_end
|
||||||
|
FROM presence_intervals
|
||||||
|
),
|
||||||
|
|
||||||
|
ordered_intervals AS (
|
||||||
|
SELECT
|
||||||
|
space_id,
|
||||||
|
event_date,
|
||||||
|
interval_start,
|
||||||
|
interval_end,
|
||||||
|
LAG(interval_end) OVER (PARTITION BY space_id, event_date ORDER BY interval_start) AS prev_end
|
||||||
|
FROM split_intervals
|
||||||
|
),
|
||||||
|
|
||||||
|
grouped_intervals AS (
|
||||||
|
SELECT *,
|
||||||
|
SUM(CASE
|
||||||
|
WHEN prev_end IS NULL OR interval_start > prev_end THEN 1
|
||||||
|
ELSE 0
|
||||||
|
END) OVER (PARTITION BY space_id, event_date ORDER BY interval_start) AS grp
|
||||||
|
FROM ordered_intervals
|
||||||
),
|
),
|
||||||
|
|
||||||
-- Step 5: Merge overlapping or adjacent intervals per day
|
|
||||||
merged_intervals AS (
|
merged_intervals AS (
|
||||||
SELECT
|
SELECT
|
||||||
space_id,
|
space_id,
|
||||||
day,
|
event_date,
|
||||||
interval_start,
|
MIN(interval_start) AS merged_start,
|
||||||
interval_end
|
MAX(interval_end) AS merged_end
|
||||||
FROM (
|
FROM grouped_intervals
|
||||||
SELECT
|
GROUP BY space_id, event_date, grp
|
||||||
space_id,
|
|
||||||
day,
|
|
||||||
interval_start,
|
|
||||||
interval_end,
|
|
||||||
LAG(interval_end) OVER (PARTITION BY space_id, day ORDER BY interval_start) AS prev_end
|
|
||||||
FROM generated_daily_intervals
|
|
||||||
) sub
|
|
||||||
WHERE prev_end IS NULL OR interval_start > prev_end
|
|
||||||
),
|
),
|
||||||
|
|
||||||
-- Step 6: Sum up total missing seconds (device reported "none") per day
|
summed_intervals AS (
|
||||||
missing_seconds_per_day AS (
|
SELECT
|
||||||
SELECT
|
space_id,
|
||||||
space_id,
|
event_date,
|
||||||
day AS missing_date,
|
SUM(EXTRACT(EPOCH FROM (merged_end - merged_start))) AS raw_occupied_seconds
|
||||||
SUM(EXTRACT(EPOCH FROM (interval_end - interval_start))) AS total_missing_seconds
|
FROM merged_intervals
|
||||||
FROM merged_intervals
|
GROUP BY space_id, event_date
|
||||||
GROUP BY space_id, day
|
|
||||||
),
|
),
|
||||||
|
|
||||||
-- Step 7: Calculate total occupied time per day (86400 - missing)
|
final_data AS (
|
||||||
occupied_seconds_per_day AS (
|
SELECT
|
||||||
SELECT
|
s.space_id,
|
||||||
space_id,
|
s.event_date,
|
||||||
missing_date as event_date,
|
LEAST(raw_occupied_seconds, 86400) AS occupied_seconds,
|
||||||
86400 - total_missing_seconds AS total_occupied_seconds,
|
ROUND(LEAST(raw_occupied_seconds, 86400) / 86400.0 * 100, 2) AS occupancy_percentage
|
||||||
(86400 - total_missing_seconds)/86400*100 as occupancy_percentage
|
FROM summed_intervals s
|
||||||
FROM missing_seconds_per_day
|
JOIN params p
|
||||||
)
|
ON p.event_date = s.event_date
|
||||||
|
|
||||||
-- Final Output
|
|
||||||
, final_data as (
|
|
||||||
SELECT occupied_seconds_per_day.space_id,
|
|
||||||
occupied_seconds_per_day.event_date,
|
|
||||||
occupied_seconds_per_day.occupancy_percentage
|
|
||||||
FROM occupied_seconds_per_day
|
|
||||||
join params p on true
|
|
||||||
and p.space_id = occupied_seconds_per_day.space_id
|
|
||||||
and p.event_date = occupied_seconds_per_day.event_date
|
|
||||||
ORDER BY 1,2
|
|
||||||
)
|
)
|
||||||
|
|
||||||
INSERT INTO public."space-daily-occupancy-duration" (
|
INSERT INTO public."space-daily-occupancy-duration" (
|
||||||
space_uuid,
|
space_uuid,
|
||||||
event_date,
|
event_date,
|
||||||
|
occupied_seconds,
|
||||||
occupancy_percentage
|
occupancy_percentage
|
||||||
)
|
)
|
||||||
select space_id,
|
SELECT
|
||||||
event_date,
|
space_id,
|
||||||
occupancy_percentage
|
event_date,
|
||||||
|
occupied_seconds,
|
||||||
|
occupancy_percentage
|
||||||
FROM final_data
|
FROM final_data
|
||||||
ON CONFLICT (space_uuid, event_date) DO UPDATE
|
ON CONFLICT (space_uuid, event_date) DO UPDATE
|
||||||
SET
|
SET
|
||||||
occupancy_percentage = EXCLUDED.occupancy_percentage;
|
occupancy_percentage = EXCLUDED.occupancy_percentage,
|
||||||
|
occupied_seconds = EXCLUDED.occupied_seconds;
|
||||||
|
|
||||||
|
@ -1,7 +1,6 @@
  WITH params AS (
      SELECT
-         $1::uuid AS device_id,
-         $2::date AS target_date
+         $1::date AS target_date
  ),
  total_energy AS (
      SELECT
@ -14,8 +13,7 @@ total_energy AS (
      MAX(log.value)::integer AS max_value
  FROM "device-status-log" log, params
  WHERE log.code = 'EnergyConsumed'
-     AND log.device_id = params.device_id
      AND log.event_time::date = params.target_date
  GROUP BY 1,2,3,4,5
  ),
  energy_phase_A AS (
@ -29,8 +27,7 @@ energy_phase_A AS (
      MAX(log.value)::integer AS max_value
  FROM "device-status-log" log, params
  WHERE log.code = 'EnergyConsumedA'
-     AND log.device_id = params.device_id
      AND log.event_time::date = params.target_date
  GROUP BY 1,2,3,4,5
  ),
  energy_phase_B AS (
@ -44,8 +41,7 @@ energy_phase_B AS (
      MAX(log.value)::integer AS max_value
  FROM "device-status-log" log, params
  WHERE log.code = 'EnergyConsumedB'
-     AND log.device_id = params.device_id
      AND log.event_time::date = params.target_date
  GROUP BY 1,2,3,4,5
  ),
  energy_phase_C AS (
@ -59,8 +55,7 @@ energy_phase_C AS (
      MAX(log.value)::integer AS max_value
  FROM "device-status-log" log, params
  WHERE log.code = 'EnergyConsumedC'
-     AND log.device_id = params.device_id
      AND log.event_time::date = params.target_date
  GROUP BY 1,2,3,4,5
  ),
  final_data AS (
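-- The energy CTEs above keep MIN and MAX of the cumulative EnergyConsumed counters
-- per day. Assuming final_data (not shown in this hunk) derives consumption as the
-- max reading minus the min reading, a minimal self-contained sketch of that idea:
--   SELECT MAX(v) - MIN(v) AS consumed_in_period
--   FROM (VALUES (1500), (1620), (1710)) AS readings(v);   -- -> 210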
@ -1,8 +1,6 @@
  WITH params AS (
      SELECT
-         $1::uuid AS device_id,
-         $2::date AS target_date,
-         $3::text AS target_hour
+         $1::date AS target_date
  ),
  total_energy AS (
      SELECT
@ -15,9 +13,7 @@ total_energy AS (
      MAX(log.value)::integer AS max_value
  FROM "device-status-log" log, params
  WHERE log.code = 'EnergyConsumed'
-     AND log.device_id = params.device_id
      AND log.event_time::date = params.target_date
-     AND EXTRACT(HOUR FROM log.event_time)::text = params.target_hour
  GROUP BY 1,2,3,4,5
  ),
  energy_phase_A AS (
@ -31,9 +27,7 @@ energy_phase_A AS (
      MAX(log.value)::integer AS max_value
  FROM "device-status-log" log, params
  WHERE log.code = 'EnergyConsumedA'
-     AND log.device_id = params.device_id
      AND log.event_time::date = params.target_date
-     AND EXTRACT(HOUR FROM log.event_time)::text = params.target_hour
  GROUP BY 1,2,3,4,5
  ),
  energy_phase_B AS (
@ -47,9 +41,7 @@ energy_phase_B AS (
      MAX(log.value)::integer AS max_value
  FROM "device-status-log" log, params
  WHERE log.code = 'EnergyConsumedB'
-     AND log.device_id = params.device_id
      AND log.event_time::date = params.target_date
-     AND EXTRACT(HOUR FROM log.event_time)::text = params.target_hour
  GROUP BY 1,2,3,4,5
  ),
  energy_phase_C AS (
@ -63,9 +55,7 @@ energy_phase_C AS (
      MAX(log.value)::integer AS max_value
  FROM "device-status-log" log, params
  WHERE log.code = 'EnergyConsumedC'
-     AND log.device_id = params.device_id
      AND log.event_time::date = params.target_date
-     AND EXTRACT(HOUR FROM log.event_time)::text = params.target_hour
  GROUP BY 1,2,3,4,5
  ),
  final_data AS (
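-- The removed hourly filter compared EXTRACT(HOUR FROM ...) cast to text against a
-- text parameter; the cast yields '7', not '07', so zero-padded inputs never matched.
-- Minimal illustration:
--   SELECT EXTRACT(HOUR FROM TIMESTAMP '2025-01-01 07:30:00')::text = '07' AS padded,
--          EXTRACT(HOUR FROM TIMESTAMP '2025-01-01 07:30:00')::text = '7'  AS unpadded;
--   -- -> false, true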
@ -1,7 +1,6 @@
  WITH params AS (
      SELECT
-         $1::uuid AS device_id,
-         $2::text AS target_month -- Format should match 'MM-YYYY'
+         $1::text AS target_month -- Format should match 'MM-YYYY'
  ),
  total_energy AS (
      SELECT
@ -14,7 +13,6 @@ total_energy AS (
      MAX(log.value)::integer AS max_value
  FROM "device-status-log" log, params
  WHERE log.code = 'EnergyConsumed'
-     AND log.device_id = params.device_id
      AND TO_CHAR(log.event_time, 'MM-YYYY') = params.target_month
  GROUP BY 1,2,3,4,5
  ),
@ -29,7 +27,6 @@ energy_phase_A AS (
      MAX(log.value)::integer AS max_value
  FROM "device-status-log" log, params
  WHERE log.code = 'EnergyConsumedA'
-     AND log.device_id = params.device_id
      AND TO_CHAR(log.event_time, 'MM-YYYY') = params.target_month
  GROUP BY 1,2,3,4,5
  ),
@ -44,7 +41,6 @@ energy_phase_B AS (
      MAX(log.value)::integer AS max_value
  FROM "device-status-log" log, params
  WHERE log.code = 'EnergyConsumedB'
-     AND log.device_id = params.device_id
      AND TO_CHAR(log.event_time, 'MM-YYYY') = params.target_month
  GROUP BY 1,2,3,4,5
  ),
@ -59,7 +55,6 @@ energy_phase_C AS (
      MAX(log.value)::integer AS max_value
  FROM "device-status-log" log, params
  WHERE log.code = 'EnergyConsumedC'
-     AND log.device_id = params.device_id
      AND TO_CHAR(log.event_time, 'MM-YYYY') = params.target_month
  GROUP BY 1,2,3,4,5
  ),
@ -16,4 +16,5 @@ WITH params AS (
  WHERE A.device_uuid::text = ANY(P.device_ids)
      AND (P.month IS NULL
          OR date_trunc('month', A.event_date) = P.month
-     )
+     );
@ -1,7 +1,6 @@
  WITH params AS (
      SELECT
-         TO_DATE(NULLIF($1, ''), 'YYYY-MM-DD') AS event_date,
-         $2::uuid AS space_id
+         TO_DATE(NULLIF($1, ''), 'YYYY-MM-DD') AS event_date
  ),

  device_logs AS (
@ -87,8 +86,7 @@ SELECT summary.space_id,
      count_total_presence_detected
  FROM summary
  JOIN params P ON true
- where summary.space_id = P.space_id
- and (P.event_date IS NULL or summary.event_date::date = P.event_date)
+ where (P.event_date IS NULL or summary.event_date::date = P.event_date)
  ORDER BY space_id, event_date)

@ -26,119 +26,337 @@ BEGIN
|
|||||||
('pm10', 255, 354, 151, 200),
|
('pm10', 255, 354, 151, 200),
|
||||||
|
|
||||||
-- VOC
|
-- VOC
|
||||||
('voc_value', 0, 200, 0, 50),
|
('voc', 0, 200, 0, 50),
|
||||||
('voc_value', 201, 400, 51, 100),
|
('voc', 201, 400, 51, 100),
|
||||||
('voc_value', 401, 600, 101, 150),
|
('voc', 401, 600, 101, 150),
|
||||||
('voc_value', 601, 1000, 151, 200),
|
('voc', 601, 1000, 151, 200),
|
||||||
|
|
||||||
-- CH2O
|
-- CH2O
|
||||||
('ch2o_value', 0, 2, 0, 50),
|
('ch2o', 0, 2, 0, 50),
|
||||||
('ch2o_value', 2.1, 4, 51, 100),
|
('ch2o', 2.1, 4, 51, 100),
|
||||||
('ch2o_value', 4.1, 6, 101, 150),
|
('ch2o', 4.1, 6, 101, 150),
|
||||||
|
|
||||||
-- CO2
|
-- CO2
|
||||||
('co2_value', 350, 1000, 0, 50),
|
('co2', 350, 1000, 0, 50),
|
||||||
('co2_value', 1001, 1250, 51, 100),
|
('co2', 1001, 1250, 51, 100),
|
||||||
('co2_value', 1251, 1500, 101, 150),
|
('co2', 1251, 1500, 101, 150),
|
||||||
('co2_value', 1501, 2000, 151, 200)
|
('co2', 1501, 2000, 151, 200)
|
||||||
) AS v(pollutant, c_low, c_high, i_low, i_high)
|
) AS v(pollutant, c_low, c_high, i_low, i_high)
|
||||||
WHERE v.pollutant = LOWER(p_pollutant)
|
WHERE v.pollutant = LOWER(p_pollutant)
|
||||||
AND concentration BETWEEN v.c_low AND v.c_high
|
AND concentration BETWEEN v.c_low AND v.c_high
|
||||||
LIMIT 1;
|
LIMIT 1;
|
||||||
|
|
||||||
-- Linear interpolation
|
|
||||||
RETURN ROUND(((i_high - i_low) * (concentration - c_low) / (c_high - c_low)) + i_low);
|
RETURN ROUND(((i_high - i_low) * (concentration - c_low) / (c_high - c_low)) + i_low);
|
||||||
END;
|
END;
|
||||||
$$ LANGUAGE plpgsql;
|
$$ LANGUAGE plpgsql;
|
||||||
|
|
||||||
-- CTE for device + status log + space
|
|
||||||
|
-- Function to classify AQI
|
||||||
|
CREATE OR REPLACE FUNCTION classify_aqi(aqi NUMERIC)
|
||||||
|
RETURNS TEXT AS $$
|
||||||
|
BEGIN
|
||||||
|
RETURN CASE
|
||||||
|
WHEN aqi BETWEEN 0 AND 50 THEN 'Good'
|
||||||
|
WHEN aqi BETWEEN 51 AND 100 THEN 'Moderate'
|
||||||
|
WHEN aqi BETWEEN 101 AND 150 THEN 'Unhealthy for Sensitive Groups'
|
||||||
|
WHEN aqi BETWEEN 151 AND 200 THEN 'Unhealthy'
|
||||||
|
WHEN aqi BETWEEN 201 AND 300 THEN 'Very Unhealthy'
|
||||||
|
WHEN aqi >= 301 THEN 'Hazardous'
|
||||||
|
ELSE NULL
|
||||||
|
END;
|
||||||
|
END;
|
||||||
|
$$ LANGUAGE plpgsql;
|
||||||
|
|
||||||
|
|
||||||
|
-- Function to convert AQI level string to number
|
||||||
|
CREATE OR REPLACE FUNCTION level_to_numeric(level_text TEXT)
|
||||||
|
RETURNS NUMERIC AS $$
|
||||||
|
BEGIN
|
||||||
|
RETURN CAST(regexp_replace(level_text, '[^0-9]', '', 'g') AS NUMERIC);
|
||||||
|
EXCEPTION WHEN others THEN
|
||||||
|
RETURN NULL;
|
||||||
|
END;
|
||||||
|
$$ LANGUAGE plpgsql;
-- Query Pipeline Starts Here
WITH device_space AS (
    SELECT
        device.uuid AS device_id,
        device.space_device_uuid AS space_id,
        "device-status-log".event_time::timestamp AS event_time,
        "device-status-log".code,
        "device-status-log".value
    FROM device
    LEFT JOIN "device-status-log"
        ON device.uuid = "device-status-log".device_id
    LEFT JOIN product
        ON product.uuid = device.product_device_uuid
    WHERE product.cat_name = 'hjjcy'
),

average_pollutants AS (
    SELECT
        event_time::date AS event_date,
        date_trunc('hour', event_time) AS event_hour,
        device_id,
        space_id,

        -- PM1
        MIN(CASE WHEN code = 'pm1' THEN value::numeric END) AS pm1_min,
        AVG(CASE WHEN code = 'pm1' THEN value::numeric END) AS pm1_avg,
        MAX(CASE WHEN code = 'pm1' THEN value::numeric END) AS pm1_max,

        -- PM25
        MIN(CASE WHEN code = 'pm25_value' THEN value::numeric END) AS pm25_min,
        AVG(CASE WHEN code = 'pm25_value' THEN value::numeric END) AS pm25_avg,
        MAX(CASE WHEN code = 'pm25_value' THEN value::numeric END) AS pm25_max,

        -- PM10
        MIN(CASE WHEN code = 'pm10' THEN value::numeric END) AS pm10_min,
        AVG(CASE WHEN code = 'pm10' THEN value::numeric END) AS pm10_avg,
        MAX(CASE WHEN code = 'pm10' THEN value::numeric END) AS pm10_max,

        -- VOC
        MIN(CASE WHEN code = 'voc_value' THEN value::numeric END) AS voc_min,
        AVG(CASE WHEN code = 'voc_value' THEN value::numeric END) AS voc_avg,
        MAX(CASE WHEN code = 'voc_value' THEN value::numeric END) AS voc_max,

        -- CH2O
        MIN(CASE WHEN code = 'ch2o_value' THEN value::numeric END) AS ch2o_min,
        AVG(CASE WHEN code = 'ch2o_value' THEN value::numeric END) AS ch2o_avg,
        MAX(CASE WHEN code = 'ch2o_value' THEN value::numeric END) AS ch2o_max,

        -- CO2
        MIN(CASE WHEN code = 'co2_value' THEN value::numeric END) AS co2_min,
        AVG(CASE WHEN code = 'co2_value' THEN value::numeric END) AS co2_avg,
        MAX(CASE WHEN code = 'co2_value' THEN value::numeric END) AS co2_max

    FROM device_space
    GROUP BY device_id, space_id, event_hour, event_date
),

filled_pollutants AS (
    SELECT
        *,
        -- AVG
        COALESCE(pm25_avg, LAG(pm25_avg) OVER (PARTITION BY device_id ORDER BY event_hour)) AS pm25_avg_f,
        COALESCE(pm10_avg, LAG(pm10_avg) OVER (PARTITION BY device_id ORDER BY event_hour)) AS pm10_avg_f,
        COALESCE(voc_avg, LAG(voc_avg) OVER (PARTITION BY device_id ORDER BY event_hour)) AS voc_avg_f,
        COALESCE(co2_avg, LAG(co2_avg) OVER (PARTITION BY device_id ORDER BY event_hour)) AS co2_avg_f,
        COALESCE(ch2o_avg, LAG(ch2o_avg) OVER (PARTITION BY device_id ORDER BY event_hour)) AS ch2o_avg_f,

        -- MIN
        COALESCE(pm25_min, LAG(pm25_min) OVER (PARTITION BY device_id ORDER BY event_hour)) AS pm25_min_f,
        COALESCE(pm10_min, LAG(pm10_min) OVER (PARTITION BY device_id ORDER BY event_hour)) AS pm10_min_f,
        COALESCE(voc_min, LAG(voc_min) OVER (PARTITION BY device_id ORDER BY event_hour)) AS voc_min_f,
        COALESCE(co2_min, LAG(co2_min) OVER (PARTITION BY device_id ORDER BY event_hour)) AS co2_min_f,
        COALESCE(ch2o_min, LAG(ch2o_min) OVER (PARTITION BY device_id ORDER BY event_hour)) AS ch2o_min_f,

        -- MAX
        COALESCE(pm25_max, LAG(pm25_max) OVER (PARTITION BY device_id ORDER BY event_hour)) AS pm25_max_f,
        COALESCE(pm10_max, LAG(pm10_max) OVER (PARTITION BY device_id ORDER BY event_hour)) AS pm10_max_f,
        COALESCE(voc_max, LAG(voc_max) OVER (PARTITION BY device_id ORDER BY event_hour)) AS voc_max_f,
        COALESCE(co2_max, LAG(co2_max) OVER (PARTITION BY device_id ORDER BY event_hour)) AS co2_max_f,
        COALESCE(ch2o_max, LAG(ch2o_max) OVER (PARTITION BY device_id ORDER BY event_hour)) AS ch2o_max_f
    FROM average_pollutants
),

hourly_results AS (
    SELECT
        device_id,
        space_id,
        event_date,
        event_hour,
        pm1_min, pm1_avg, pm1_max,
        pm25_min_f, pm25_avg_f, pm25_max_f,
        pm10_min_f, pm10_avg_f, pm10_max_f,
        voc_min_f, voc_avg_f, voc_max_f,
        co2_min_f, co2_avg_f, co2_max_f,
        ch2o_min_f, ch2o_avg_f, ch2o_max_f,

        GREATEST(
            calculate_aqi('pm25', pm25_min_f),
            calculate_aqi('pm10', pm10_min_f)
        ) AS hourly_min_aqi,

        GREATEST(
            calculate_aqi('pm25', pm25_avg_f),
            calculate_aqi('pm10', pm10_avg_f)
        ) AS hourly_avg_aqi,

        GREATEST(
            calculate_aqi('pm25', pm25_max_f),
            calculate_aqi('pm10', pm10_max_f)
        ) AS hourly_max_aqi,

        classify_aqi(GREATEST(
            calculate_aqi('pm25', pm25_avg_f),
            calculate_aqi('pm10', pm10_avg_f)
        )) AS aqi_category,

        classify_aqi(calculate_aqi('pm25', pm25_avg_f)) AS pm25_category,
        classify_aqi(calculate_aqi('pm10', pm10_avg_f)) AS pm10_category,
        classify_aqi(calculate_aqi('voc', voc_avg_f)) AS voc_category,
        classify_aqi(calculate_aqi('co2', co2_avg_f)) AS co2_category,
        classify_aqi(calculate_aqi('ch2o', ch2o_avg_f)) AS ch2o_category
    FROM filled_pollutants
),

daily_category_counts AS (
    SELECT device_id, space_id, event_date, aqi_category AS category, 'aqi' AS pollutant, COUNT(*) AS category_count
    FROM hourly_results
    GROUP BY device_id, space_id, event_date, aqi_category

    UNION ALL

    SELECT device_id, space_id, event_date, pm25_category AS category, 'pm25' AS pollutant, COUNT(*) AS category_count
    FROM hourly_results
    GROUP BY device_id, space_id, event_date, pm25_category

    UNION ALL

    SELECT device_id, space_id, event_date, pm10_category AS category, 'pm10' AS pollutant, COUNT(*) AS category_count
    FROM hourly_results
    GROUP BY device_id, space_id, event_date, pm10_category

    UNION ALL

    SELECT device_id, space_id, event_date, voc_category AS category, 'voc' AS pollutant, COUNT(*) AS category_count
    FROM hourly_results
    GROUP BY device_id, space_id, event_date, voc_category

    UNION ALL

    SELECT device_id, space_id, event_date, co2_category AS category, 'co2' AS pollutant, COUNT(*) AS category_count
    FROM hourly_results
    GROUP BY device_id, space_id, event_date, co2_category

    UNION ALL

    SELECT device_id, space_id, event_date, ch2o_category AS category, 'ch2o' AS pollutant, COUNT(*) AS category_count
    FROM hourly_results
    GROUP BY device_id, space_id, event_date, ch2o_category
),

daily_totals AS (
    SELECT
        device_id,
        space_id,
        event_date,
        SUM(category_count) AS total_count
    FROM daily_category_counts
    WHERE pollutant = 'aqi'
    GROUP BY device_id, space_id, event_date
),

-- Pivot Categories into Columns
daily_percentages AS (
    SELECT
        dt.device_id,
        dt.space_id,
        dt.event_date,

        -- AQI CATEGORIES
        ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Good' AND dcc.pollutant = 'aqi' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS good_aqi_percentage,
        ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Moderate' AND dcc.pollutant = 'aqi' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS moderate_aqi_percentage,
        ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Unhealthy for Sensitive Groups' AND dcc.pollutant = 'aqi' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS unhealthy_sensitive_aqi_percentage,
        ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Unhealthy' AND dcc.pollutant = 'aqi' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS unhealthy_aqi_percentage,
        ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Very Unhealthy' AND dcc.pollutant = 'aqi' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS very_unhealthy_aqi_percentage,
        ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Hazardous' AND dcc.pollutant = 'aqi' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS hazardous_aqi_percentage,

        -- PM25 CATEGORIES
        ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Good' AND dcc.pollutant = 'pm25' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS good_pm25_percentage,
        ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Moderate' AND dcc.pollutant = 'pm25' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS moderate_pm25_percentage,
        ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Unhealthy for Sensitive Groups' AND dcc.pollutant = 'pm25' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS unhealthy_sensitive_pm25_percentage,
        ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Unhealthy' AND dcc.pollutant = 'pm25' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS unhealthy_pm25_percentage,
        ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Very Unhealthy' AND dcc.pollutant = 'pm25' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS very_unhealthy_pm25_percentage,
        ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Hazardous' AND dcc.pollutant = 'pm25' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS hazardous_pm25_percentage,

        -- PM10 CATEGORIES
        ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Good' AND dcc.pollutant = 'pm10' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS good_pm10_percentage,
        ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Moderate' AND dcc.pollutant = 'pm10' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS moderate_pm10_percentage,
        ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Unhealthy for Sensitive Groups' AND dcc.pollutant = 'pm10' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS unhealthy_sensitive_pm10_percentage,
        ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Unhealthy' AND dcc.pollutant = 'pm10' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS unhealthy_pm10_percentage,
        ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Very Unhealthy' AND dcc.pollutant = 'pm10' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS very_unhealthy_pm10_percentage,
        ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Hazardous' AND dcc.pollutant = 'pm10' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS hazardous_pm10_percentage,

        -- VOC CATEGORIES
        ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Good' AND dcc.pollutant = 'voc' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS good_voc_percentage,
        ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Moderate' AND dcc.pollutant = 'voc' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS moderate_voc_percentage,
        ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Unhealthy for Sensitive Groups' AND dcc.pollutant = 'voc' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS unhealthy_sensitive_voc_percentage,
        ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Unhealthy' AND dcc.pollutant = 'voc' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS unhealthy_voc_percentage,
        ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Very Unhealthy' AND dcc.pollutant = 'voc' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS very_unhealthy_voc_percentage,
        ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Hazardous' AND dcc.pollutant = 'voc' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS hazardous_voc_percentage,

        -- CO2 CATEGORIES
        ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Good' AND dcc.pollutant = 'co2' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS good_co2_percentage,
        ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Moderate' AND dcc.pollutant = 'co2' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS moderate_co2_percentage,
        ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Unhealthy for Sensitive Groups' AND dcc.pollutant = 'co2' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS unhealthy_sensitive_co2_percentage,
        ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Unhealthy' AND dcc.pollutant = 'co2' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS unhealthy_co2_percentage,
        ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Very Unhealthy' AND dcc.pollutant = 'co2' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS very_unhealthy_co2_percentage,
        ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Hazardous' AND dcc.pollutant = 'co2' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS hazardous_co2_percentage,

        -- CH2O CATEGORIES
        ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Good' AND dcc.pollutant = 'ch2o' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS good_ch2o_percentage,
        ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Moderate' AND dcc.pollutant = 'ch2o' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS moderate_ch2o_percentage,
        ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Unhealthy for Sensitive Groups' AND dcc.pollutant = 'ch2o' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS unhealthy_sensitive_ch2o_percentage,
        ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Unhealthy' AND dcc.pollutant = 'ch2o' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS unhealthy_ch2o_percentage,
        ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Very Unhealthy' AND dcc.pollutant = 'ch2o' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS very_unhealthy_ch2o_percentage,
        ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Hazardous' AND dcc.pollutant = 'ch2o' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS hazardous_ch2o_percentage

    FROM daily_totals dt
    LEFT JOIN daily_category_counts dcc
        ON dt.device_id = dcc.device_id AND dt.event_date = dcc.event_date
    GROUP BY dt.device_id, dt.space_id, dt.event_date, dt.total_count
),

daily_averages AS (
    SELECT
        device_id,
        space_id,
        event_date,
        -- AQI
        ROUND(AVG(hourly_min_aqi)::numeric, 2) AS daily_min_aqi,
        ROUND(AVG(hourly_avg_aqi)::numeric, 2) AS daily_avg_aqi,
        ROUND(AVG(hourly_max_aqi)::numeric, 2) AS daily_max_aqi,
        -- PM25
        ROUND(AVG(pm25_min_f)::numeric, 2) AS daily_min_pm25,
        ROUND(AVG(pm25_avg_f)::numeric, 2) AS daily_avg_pm25,
        ROUND(AVG(pm25_max_f)::numeric, 2) AS daily_max_pm25,
        -- PM10
        ROUND(AVG(pm10_min_f)::numeric, 2) AS daily_min_pm10,
        ROUND(AVG(pm10_avg_f)::numeric, 2) AS daily_avg_pm10,
        ROUND(AVG(pm10_max_f)::numeric, 2) AS daily_max_pm10,
        -- VOC
        ROUND(AVG(voc_min_f)::numeric, 2) AS daily_min_voc,
        ROUND(AVG(voc_avg_f)::numeric, 2) AS daily_avg_voc,
        ROUND(AVG(voc_max_f)::numeric, 2) AS daily_max_voc,
        -- CO2
        ROUND(AVG(co2_min_f)::numeric, 2) AS daily_min_co2,
        ROUND(AVG(co2_avg_f)::numeric, 2) AS daily_avg_co2,
        ROUND(AVG(co2_max_f)::numeric, 2) AS daily_max_co2,
        -- CH2O
        ROUND(AVG(ch2o_min_f)::numeric, 2) AS daily_min_ch2o,
        ROUND(AVG(ch2o_avg_f)::numeric, 2) AS daily_avg_ch2o,
        ROUND(AVG(ch2o_max_f)::numeric, 2) AS daily_max_ch2o
    FROM hourly_results
    GROUP BY device_id, space_id, event_date
)

SELECT
    p.device_id,
    p.space_id,
    p.event_date,
    p.good_aqi_percentage, p.moderate_aqi_percentage, p.unhealthy_sensitive_aqi_percentage, p.unhealthy_aqi_percentage, p.very_unhealthy_aqi_percentage, p.hazardous_aqi_percentage,
    a.daily_avg_aqi, a.daily_max_aqi, a.daily_min_aqi,
    p.good_pm25_percentage, p.moderate_pm25_percentage, p.unhealthy_sensitive_pm25_percentage, p.unhealthy_pm25_percentage, p.very_unhealthy_pm25_percentage, p.hazardous_pm25_percentage,
    a.daily_avg_pm25, a.daily_max_pm25, a.daily_min_pm25,
    p.good_pm10_percentage, p.moderate_pm10_percentage, p.unhealthy_sensitive_pm10_percentage, p.unhealthy_pm10_percentage, p.very_unhealthy_pm10_percentage, p.hazardous_pm10_percentage,
    a.daily_avg_pm10, a.daily_max_pm10, a.daily_min_pm10,
    p.good_voc_percentage, p.moderate_voc_percentage, p.unhealthy_sensitive_voc_percentage, p.unhealthy_voc_percentage, p.very_unhealthy_voc_percentage, p.hazardous_voc_percentage,
    a.daily_avg_voc, a.daily_max_voc, a.daily_min_voc,
    p.good_co2_percentage, p.moderate_co2_percentage, p.unhealthy_sensitive_co2_percentage, p.unhealthy_co2_percentage, p.very_unhealthy_co2_percentage, p.hazardous_co2_percentage,
    a.daily_avg_co2, a.daily_max_co2, a.daily_min_co2,
    p.good_ch2o_percentage, p.moderate_ch2o_percentage, p.unhealthy_sensitive_ch2o_percentage, p.unhealthy_ch2o_percentage, p.very_unhealthy_ch2o_percentage, p.hazardous_ch2o_percentage,
    a.daily_avg_ch2o, a.daily_max_ch2o, a.daily_min_ch2o
FROM daily_percentages p
LEFT JOIN daily_averages a
    ON p.device_id = a.device_id AND p.event_date = a.event_date
ORDER BY p.space_id, p.event_date;
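One behaviour of the forward fill in filled_pollutants is easy to miss: COALESCE over LAG of the raw column only reaches back a single hour, so one empty hour is bridged but two consecutive empty hours still end up NULL. A minimal, self-contained illustration (not part of the change):

-- Forward fill reaches back only one hour:
SELECT hour_no,
       raw_value,
       COALESCE(raw_value, LAG(raw_value) OVER (ORDER BY hour_no)) AS filled_value
FROM (VALUES (10, 12.0), (11, NULL), (12, NULL)) AS t(hour_no, raw_value);
-- filled_value comes out as 12.0, 12.0, NULL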
-- Previous version of this query (superseded by the pipeline above): per-device daily
-- aggregates taken straight from an air_data CTE, with one combined AQI per day.
WITH device_space AS (
    SELECT
        device.uuid AS device_id,
        device.created_at,
        device.space_device_uuid AS space_id,
        "device-status-log".event_id,
        "device-status-log".event_time::timestamp,
        "device-status-log".code,
        "device-status-log".value,
        "device-status-log".log
    FROM device
    LEFT JOIN "device-status-log"
        ON device.uuid = "device-status-log".device_id
    LEFT JOIN product
        ON product.uuid = device.product_device_uuid
    WHERE product.cat_name = 'hjjcy'
),

-- Aggregate air sensor data per device per day
air_data AS (
    SELECT
        DATE_TRUNC('day', event_time) AS date,
        device_id,
        space_id,

        -- VOC
        MIN(CASE WHEN code = 'voc_value' THEN value::numeric END) AS voc_min,
        MAX(CASE WHEN code = 'voc_value' THEN value::numeric END) AS voc_max,
        AVG(CASE WHEN code = 'voc_value' THEN value::numeric END) AS voc_avg,

        -- PM1
        MIN(CASE WHEN code = 'pm1' THEN value::numeric END) AS pm1_min,
        MAX(CASE WHEN code = 'pm1' THEN value::numeric END) AS pm1_max,
        AVG(CASE WHEN code = 'pm1' THEN value::numeric END) AS pm1_avg,

        -- PM2.5
        MIN(CASE WHEN code = 'pm25' THEN value::numeric END) AS pm25_min,
        MAX(CASE WHEN code = 'pm25' THEN value::numeric END) AS pm25_max,
        AVG(CASE WHEN code = 'pm25' THEN value::numeric END) AS pm25_avg,

        -- PM10
        MIN(CASE WHEN code = 'pm10' THEN value::numeric END) AS pm10_min,
        MAX(CASE WHEN code = 'pm10' THEN value::numeric END) AS pm10_max,
        AVG(CASE WHEN code = 'pm10' THEN value::numeric END) AS pm10_avg,

        -- CH2O
        MIN(CASE WHEN code = 'ch2o_value' THEN value::numeric END) AS ch2o_min,
        MAX(CASE WHEN code = 'ch2o_value' THEN value::numeric END) AS ch2o_max,
        AVG(CASE WHEN code = 'ch2o_value' THEN value::numeric END) AS ch2o_avg,

        -- Humidity
        MIN(CASE WHEN code = 'humidity_value' THEN value::numeric END) AS humidity_low,
        MAX(CASE WHEN code = 'humidity_value' THEN value::numeric END) AS humidity_high,
        AVG(CASE WHEN code = 'humidity_value' THEN value::numeric END) AS humidity_avg,

        -- Temperature
        MIN(CASE WHEN code = 'temp_current' THEN value::numeric END) AS temp_low,
        MAX(CASE WHEN code = 'temp_current' THEN value::numeric END) AS temp_high,
        AVG(CASE WHEN code = 'temp_current' THEN value::numeric END) AS temp_avg,

        -- CO2
        MIN(CASE WHEN code = 'co2_value' THEN value::numeric END) AS co2_min,
        MAX(CASE WHEN code = 'co2_value' THEN value::numeric END) AS co2_max,
        AVG(CASE WHEN code = 'co2_value' THEN value::numeric END) AS co2_avg

    FROM device_space
    GROUP BY date, device_id, space_id
)

-- Final select with AQI calculation
SELECT
    date,
    device_id,
    space_id,
    voc_min, voc_max, voc_avg,
    pm1_min, pm1_max, pm1_avg,
    pm25_min, pm25_max, pm25_avg,
    pm10_min, pm10_max, pm10_avg,
    ch2o_min, ch2o_max, ch2o_avg,
    humidity_low, humidity_high, humidity_avg,
    temp_low, temp_high, temp_avg,
    co2_min, co2_max, co2_avg,
    GREATEST(
        calculate_aqi('pm25', pm25_avg),
        calculate_aqi('pm10', pm10_avg),
        calculate_aqi('voc_value', voc_avg),
        calculate_aqi('co2_value', co2_avg),
        calculate_aqi('ch2o_value', ch2o_avg)
    ) AS overall_AQI
FROM air_data;

@ -0,0 +1,275 @@
-- Query Pipeline Starts Here
WITH device_space AS (
    SELECT
        device.uuid AS device_id,
        device.space_device_uuid AS space_id,
        "device-status-log".event_time::timestamp AS event_time,
        "device-status-log".code,
        "device-status-log".value
    FROM device
    LEFT JOIN "device-status-log"
        ON device.uuid = "device-status-log".device_id
    LEFT JOIN product
        ON product.uuid = device.product_device_uuid
    WHERE product.cat_name = 'hjjcy'
),

average_pollutants AS (
    SELECT
        event_time::date AS event_date,
        date_trunc('hour', event_time) AS event_hour,
        space_id,

        -- PM1
        MIN(CASE WHEN code = 'pm1' THEN value::numeric END) AS pm1_min,
        AVG(CASE WHEN code = 'pm1' THEN value::numeric END) AS pm1_avg,
        MAX(CASE WHEN code = 'pm1' THEN value::numeric END) AS pm1_max,

        -- PM25
        MIN(CASE WHEN code = 'pm25_value' THEN value::numeric END) AS pm25_min,
        AVG(CASE WHEN code = 'pm25_value' THEN value::numeric END) AS pm25_avg,
        MAX(CASE WHEN code = 'pm25_value' THEN value::numeric END) AS pm25_max,

        -- PM10
        MIN(CASE WHEN code = 'pm10' THEN value::numeric END) AS pm10_min,
        AVG(CASE WHEN code = 'pm10' THEN value::numeric END) AS pm10_avg,
        MAX(CASE WHEN code = 'pm10' THEN value::numeric END) AS pm10_max,

        -- VOC
        MIN(CASE WHEN code = 'voc_value' THEN value::numeric END) AS voc_min,
        AVG(CASE WHEN code = 'voc_value' THEN value::numeric END) AS voc_avg,
        MAX(CASE WHEN code = 'voc_value' THEN value::numeric END) AS voc_max,

        -- CH2O
        MIN(CASE WHEN code = 'ch2o_value' THEN value::numeric END) AS ch2o_min,
        AVG(CASE WHEN code = 'ch2o_value' THEN value::numeric END) AS ch2o_avg,
        MAX(CASE WHEN code = 'ch2o_value' THEN value::numeric END) AS ch2o_max,

        -- CO2
        MIN(CASE WHEN code = 'co2_value' THEN value::numeric END) AS co2_min,
        AVG(CASE WHEN code = 'co2_value' THEN value::numeric END) AS co2_avg,
        MAX(CASE WHEN code = 'co2_value' THEN value::numeric END) AS co2_max

    FROM device_space
    GROUP BY space_id, event_hour, event_date
),

filled_pollutants AS (
    SELECT
        *,
        -- AVG
        COALESCE(pm25_avg, LAG(pm25_avg) OVER (PARTITION BY space_id ORDER BY event_hour)) AS pm25_avg_f,
        COALESCE(pm10_avg, LAG(pm10_avg) OVER (PARTITION BY space_id ORDER BY event_hour)) AS pm10_avg_f,
        COALESCE(voc_avg, LAG(voc_avg) OVER (PARTITION BY space_id ORDER BY event_hour)) AS voc_avg_f,
        COALESCE(co2_avg, LAG(co2_avg) OVER (PARTITION BY space_id ORDER BY event_hour)) AS co2_avg_f,
        COALESCE(ch2o_avg, LAG(ch2o_avg) OVER (PARTITION BY space_id ORDER BY event_hour)) AS ch2o_avg_f,

        -- MIN
        COALESCE(pm25_min, LAG(pm25_min) OVER (PARTITION BY space_id ORDER BY event_hour)) AS pm25_min_f,
        COALESCE(pm10_min, LAG(pm10_min) OVER (PARTITION BY space_id ORDER BY event_hour)) AS pm10_min_f,
        COALESCE(voc_min, LAG(voc_min) OVER (PARTITION BY space_id ORDER BY event_hour)) AS voc_min_f,
        COALESCE(co2_min, LAG(co2_min) OVER (PARTITION BY space_id ORDER BY event_hour)) AS co2_min_f,
        COALESCE(ch2o_min, LAG(ch2o_min) OVER (PARTITION BY space_id ORDER BY event_hour)) AS ch2o_min_f,

        -- MAX
        COALESCE(pm25_max, LAG(pm25_max) OVER (PARTITION BY space_id ORDER BY event_hour)) AS pm25_max_f,
        COALESCE(pm10_max, LAG(pm10_max) OVER (PARTITION BY space_id ORDER BY event_hour)) AS pm10_max_f,
        COALESCE(voc_max, LAG(voc_max) OVER (PARTITION BY space_id ORDER BY event_hour)) AS voc_max_f,
        COALESCE(co2_max, LAG(co2_max) OVER (PARTITION BY space_id ORDER BY event_hour)) AS co2_max_f,
        COALESCE(ch2o_max, LAG(ch2o_max) OVER (PARTITION BY space_id ORDER BY event_hour)) AS ch2o_max_f
    FROM average_pollutants
),

hourly_results AS (
    SELECT
        space_id,
        event_date,
        event_hour,
        pm1_min, pm1_avg, pm1_max,
        pm25_min_f, pm25_avg_f, pm25_max_f,
        pm10_min_f, pm10_avg_f, pm10_max_f,
        voc_min_f, voc_avg_f, voc_max_f,
        co2_min_f, co2_avg_f, co2_max_f,
        ch2o_min_f, ch2o_avg_f, ch2o_max_f,

        GREATEST(
            calculate_aqi('pm25', pm25_min_f),
            calculate_aqi('pm10', pm10_min_f)
        ) AS hourly_min_aqi,

        GREATEST(
            calculate_aqi('pm25', pm25_avg_f),
            calculate_aqi('pm10', pm10_avg_f)
        ) AS hourly_avg_aqi,

        GREATEST(
            calculate_aqi('pm25', pm25_max_f),
            calculate_aqi('pm10', pm10_max_f)
        ) AS hourly_max_aqi,

        classify_aqi(GREATEST(
            calculate_aqi('pm25', pm25_avg_f),
            calculate_aqi('pm10', pm10_avg_f)
        )) AS aqi_category,

        classify_aqi(calculate_aqi('pm25', pm25_avg_f)) AS pm25_category,
        classify_aqi(calculate_aqi('pm10', pm10_avg_f)) AS pm10_category,
        classify_aqi(calculate_aqi('voc', voc_avg_f)) AS voc_category,
        classify_aqi(calculate_aqi('co2', co2_avg_f)) AS co2_category,
        classify_aqi(calculate_aqi('ch2o', ch2o_avg_f)) AS ch2o_category
    FROM filled_pollutants
),

daily_category_counts AS (
    SELECT space_id, event_date, aqi_category AS category, 'aqi' AS pollutant, COUNT(*) AS category_count
    FROM hourly_results
    GROUP BY space_id, event_date, aqi_category

    UNION ALL

    SELECT space_id, event_date, pm25_category AS category, 'pm25' AS pollutant, COUNT(*) AS category_count
    FROM hourly_results
    GROUP BY space_id, event_date, pm25_category

    UNION ALL

    SELECT space_id, event_date, pm10_category AS category, 'pm10' AS pollutant, COUNT(*) AS category_count
    FROM hourly_results
    GROUP BY space_id, event_date, pm10_category

    UNION ALL

    SELECT space_id, event_date, voc_category AS category, 'voc' AS pollutant, COUNT(*) AS category_count
    FROM hourly_results
    GROUP BY space_id, event_date, voc_category

    UNION ALL

    SELECT space_id, event_date, co2_category AS category, 'co2' AS pollutant, COUNT(*) AS category_count
    FROM hourly_results
    GROUP BY space_id, event_date, co2_category

    UNION ALL

    SELECT space_id, event_date, ch2o_category AS category, 'ch2o' AS pollutant, COUNT(*) AS category_count
    FROM hourly_results
    GROUP BY space_id, event_date, ch2o_category
),

daily_totals AS (
    SELECT
        space_id,
        event_date,
        SUM(category_count) AS total_count
    FROM daily_category_counts
    WHERE pollutant = 'aqi'
    GROUP BY space_id, event_date
),

-- Pivot Categories into Columns
daily_percentages AS (
    SELECT
        dt.space_id,
        dt.event_date,

        -- AQI CATEGORIES
        ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Good' AND dcc.pollutant = 'aqi' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS good_aqi_percentage,
        ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Moderate' AND dcc.pollutant = 'aqi' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS moderate_aqi_percentage,
        ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Unhealthy for Sensitive Groups' AND dcc.pollutant = 'aqi' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS unhealthy_sensitive_aqi_percentage,
        ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Unhealthy' AND dcc.pollutant = 'aqi' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS unhealthy_aqi_percentage,
        ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Very Unhealthy' AND dcc.pollutant = 'aqi' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS very_unhealthy_aqi_percentage,
        ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Hazardous' AND dcc.pollutant = 'aqi' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS hazardous_aqi_percentage,

        -- PM25 CATEGORIES
        ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Good' AND dcc.pollutant = 'pm25' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS good_pm25_percentage,
        ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Moderate' AND dcc.pollutant = 'pm25' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS moderate_pm25_percentage,
        ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Unhealthy for Sensitive Groups' AND dcc.pollutant = 'pm25' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS unhealthy_sensitive_pm25_percentage,
        ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Unhealthy' AND dcc.pollutant = 'pm25' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS unhealthy_pm25_percentage,
        ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Very Unhealthy' AND dcc.pollutant = 'pm25' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS very_unhealthy_pm25_percentage,
        ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Hazardous' AND dcc.pollutant = 'pm25' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS hazardous_pm25_percentage,

        -- PM10 CATEGORIES
        ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Good' AND dcc.pollutant = 'pm10' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS good_pm10_percentage,
        ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Moderate' AND dcc.pollutant = 'pm10' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS moderate_pm10_percentage,
        ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Unhealthy for Sensitive Groups' AND dcc.pollutant = 'pm10' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS unhealthy_sensitive_pm10_percentage,
        ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Unhealthy' AND dcc.pollutant = 'pm10' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS unhealthy_pm10_percentage,
        ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Very Unhealthy' AND dcc.pollutant = 'pm10' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS very_unhealthy_pm10_percentage,
        ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Hazardous' AND dcc.pollutant = 'pm10' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS hazardous_pm10_percentage,

        -- VOC CATEGORIES
        ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Good' AND dcc.pollutant = 'voc' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS good_voc_percentage,
        ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Moderate' AND dcc.pollutant = 'voc' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS moderate_voc_percentage,
        ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Unhealthy for Sensitive Groups' AND dcc.pollutant = 'voc' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS unhealthy_sensitive_voc_percentage,
        ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Unhealthy' AND dcc.pollutant = 'voc' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS unhealthy_voc_percentage,
        ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Very Unhealthy' AND dcc.pollutant = 'voc' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS very_unhealthy_voc_percentage,
        ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Hazardous' AND dcc.pollutant = 'voc' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS hazardous_voc_percentage,

        -- CO2 CATEGORIES
        ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Good' AND dcc.pollutant = 'co2' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS good_co2_percentage,
        ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Moderate' AND dcc.pollutant = 'co2' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS moderate_co2_percentage,
        ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Unhealthy for Sensitive Groups' AND dcc.pollutant = 'co2' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS unhealthy_sensitive_co2_percentage,
        ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Unhealthy' AND dcc.pollutant = 'co2' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS unhealthy_co2_percentage,
        ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Very Unhealthy' AND dcc.pollutant = 'co2' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS very_unhealthy_co2_percentage,
        ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Hazardous' AND dcc.pollutant = 'co2' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS hazardous_co2_percentage,

        -- CH2O CATEGORIES
        ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Good' AND dcc.pollutant = 'ch2o' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS good_ch2o_percentage,
        ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Moderate' AND dcc.pollutant = 'ch2o' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS moderate_ch2o_percentage,
        ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Unhealthy for Sensitive Groups' AND dcc.pollutant = 'ch2o' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS unhealthy_sensitive_ch2o_percentage,
        ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Unhealthy' AND dcc.pollutant = 'ch2o' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS unhealthy_ch2o_percentage,
        ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Very Unhealthy' AND dcc.pollutant = 'ch2o' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS very_unhealthy_ch2o_percentage,
        ROUND(COALESCE(SUM(CASE WHEN dcc.category = 'Hazardous' AND dcc.pollutant = 'ch2o' THEN dcc.category_count ELSE 0 END) * 100.0 / dt.total_count, 0), 2) AS hazardous_ch2o_percentage

    FROM daily_totals dt
    LEFT JOIN daily_category_counts dcc
        ON dt.space_id = dcc.space_id AND dt.event_date = dcc.event_date
    GROUP BY dt.space_id, dt.event_date, dt.total_count
),

daily_averages AS (
    SELECT
        space_id,
        event_date,
        -- AQI
        ROUND(AVG(hourly_min_aqi)::numeric, 2) AS daily_min_aqi,
        ROUND(AVG(hourly_avg_aqi)::numeric, 2) AS daily_avg_aqi,
        ROUND(AVG(hourly_max_aqi)::numeric, 2) AS daily_max_aqi,
        -- PM25
        ROUND(AVG(pm25_min_f)::numeric, 2) AS daily_min_pm25,
        ROUND(AVG(pm25_avg_f)::numeric, 2) AS daily_avg_pm25,
        ROUND(AVG(pm25_max_f)::numeric, 2) AS daily_max_pm25,
        -- PM10
        ROUND(AVG(pm10_min_f)::numeric, 2) AS daily_min_pm10,
        ROUND(AVG(pm10_avg_f)::numeric, 2) AS daily_avg_pm10,
        ROUND(AVG(pm10_max_f)::numeric, 2) AS daily_max_pm10,
        -- VOC
        ROUND(AVG(voc_min_f)::numeric, 2) AS daily_min_voc,
        ROUND(AVG(voc_avg_f)::numeric, 2) AS daily_avg_voc,
        ROUND(AVG(voc_max_f)::numeric, 2) AS daily_max_voc,
        -- CO2
        ROUND(AVG(co2_min_f)::numeric, 2) AS daily_min_co2,
        ROUND(AVG(co2_avg_f)::numeric, 2) AS daily_avg_co2,
        ROUND(AVG(co2_max_f)::numeric, 2) AS daily_max_co2,
        -- CH2O
        ROUND(AVG(ch2o_min_f)::numeric, 2) AS daily_min_ch2o,
        ROUND(AVG(ch2o_avg_f)::numeric, 2) AS daily_avg_ch2o,
        ROUND(AVG(ch2o_max_f)::numeric, 2) AS daily_max_ch2o
    FROM hourly_results
    GROUP BY space_id, event_date
)

SELECT
    p.space_id,
    p.event_date,
    p.good_aqi_percentage, p.moderate_aqi_percentage, p.unhealthy_sensitive_aqi_percentage, p.unhealthy_aqi_percentage, p.very_unhealthy_aqi_percentage, p.hazardous_aqi_percentage,
    a.daily_avg_aqi, a.daily_max_aqi, a.daily_min_aqi,
    p.good_pm25_percentage, p.moderate_pm25_percentage, p.unhealthy_sensitive_pm25_percentage, p.unhealthy_pm25_percentage, p.very_unhealthy_pm25_percentage, p.hazardous_pm25_percentage,
    a.daily_avg_pm25, a.daily_max_pm25, a.daily_min_pm25,
    p.good_pm10_percentage, p.moderate_pm10_percentage, p.unhealthy_sensitive_pm10_percentage, p.unhealthy_pm10_percentage, p.very_unhealthy_pm10_percentage, p.hazardous_pm10_percentage,
    a.daily_avg_pm10, a.daily_max_pm10, a.daily_min_pm10,
    p.good_voc_percentage, p.moderate_voc_percentage, p.unhealthy_sensitive_voc_percentage, p.unhealthy_voc_percentage, p.very_unhealthy_voc_percentage, p.hazardous_voc_percentage,
    a.daily_avg_voc, a.daily_max_voc, a.daily_min_voc,
    p.good_co2_percentage, p.moderate_co2_percentage, p.unhealthy_sensitive_co2_percentage, p.unhealthy_co2_percentage, p.very_unhealthy_co2_percentage, p.hazardous_co2_percentage,
    a.daily_avg_co2, a.daily_max_co2, a.daily_min_co2,
    p.good_ch2o_percentage, p.moderate_ch2o_percentage, p.unhealthy_sensitive_ch2o_percentage, p.unhealthy_ch2o_percentage, p.very_unhealthy_ch2o_percentage, p.hazardous_ch2o_percentage,
    a.daily_avg_ch2o, a.daily_max_ch2o, a.daily_min_ch2o
FROM daily_percentages p
LEFT JOIN daily_averages a
    ON p.space_id = a.space_id AND p.event_date = a.event_date
ORDER BY p.space_id, p.event_date;
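The *_percentage columns above are plain share-of-hours arithmetic: hours in a category times 100, divided by the day's total of AQI-classified hourly rows (daily_totals keeps only pollutant = 'aqi', so every pollutant's shares are measured against that same hour count), rounded to two decimals. A tiny illustration with made-up numbers:

-- 18 of 24 hourly rows classified 'Good' for the day:
SELECT ROUND(18 * 100.0 / 24, 2) AS good_aqi_percentage;  -- 75.00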
@ -1,91 +1,90 @@
-- Previous version of the occupancy query:
-- Step 1: Get device presence events with previous timestamps
WITH start_date AS (
    SELECT
        d.uuid AS device_id,
        d.space_device_uuid AS space_id,
        l.value,
        l.event_time::timestamp AS event_time,
        LAG(l.event_time::timestamp) OVER (PARTITION BY d.uuid ORDER BY l.event_time) AS prev_timestamp
    FROM device d
    LEFT JOIN "device-status-log" l
        ON d.uuid = l.device_id
    LEFT JOIN product p
        ON p.uuid = d.product_device_uuid
    WHERE p.cat_name = 'hps'
        AND l.code = 'presence_state'
),

-- Step 2: Identify periods when device reports "none"
device_none_periods AS (
    SELECT
        space_id,
        device_id,
        event_time AS empty_from,
        LEAD(event_time) OVER (PARTITION BY device_id ORDER BY event_time) AS empty_until
    FROM start_date
    WHERE value = 'none'
),

-- Step 3: Clip the "none" periods to the edges of each day
clipped_device_none_periods AS (
    SELECT
        space_id,
        GREATEST(empty_from, DATE_TRUNC('day', empty_from)) AS clipped_from,
        LEAST(empty_until, DATE_TRUNC('day', empty_until) + INTERVAL '1 day') AS clipped_until
    FROM device_none_periods
    WHERE empty_until IS NOT NULL
),

-- Step 4: Break multi-day periods into daily intervals
generated_daily_intervals AS (
    SELECT
        space_id,
        gs::date AS day,
        GREATEST(clipped_from, gs) AS interval_start,
        LEAST(clipped_until, gs + INTERVAL '1 day') AS interval_end
    FROM clipped_device_none_periods,
    LATERAL generate_series(DATE_TRUNC('day', clipped_from), DATE_TRUNC('day', clipped_until), INTERVAL '1 day') AS gs
),

-- Step 5: Merge overlapping or adjacent intervals per day
merged_intervals AS (
    SELECT
        space_id,
        day,
        interval_start,
        interval_end
    FROM (
        SELECT
            space_id,
            day,
            interval_start,
            interval_end,
            LAG(interval_end) OVER (PARTITION BY space_id, day ORDER BY interval_start) AS prev_end
        FROM generated_daily_intervals
    ) sub
    WHERE prev_end IS NULL OR interval_start > prev_end
),

-- Step 6: Sum up total missing seconds (device reported "none") per day
missing_seconds_per_day AS (
    SELECT
        space_id,
        day AS missing_date,
        SUM(EXTRACT(EPOCH FROM (interval_end - interval_start))) AS total_missing_seconds
    FROM merged_intervals
    GROUP BY space_id, day
),

-- Step 7: Calculate total occupied time per day (86400 - missing)
occupied_seconds_per_day AS (
    SELECT
        space_id,
        missing_date AS date,
        86400 - total_missing_seconds AS total_occupied_seconds
    FROM missing_seconds_per_day
)

-- Final Output
SELECT *
FROM occupied_seconds_per_day
ORDER BY 1, 2;


-- Updated version of the occupancy query:
WITH presence_logs AS (
    SELECT
        d.space_device_uuid AS space_id,
        l.device_id,
        l.event_time,
        l.value,
        LAG(l.event_time) OVER (PARTITION BY l.device_id ORDER BY l.event_time) AS prev_time,
        LAG(l.value) OVER (PARTITION BY l.device_id ORDER BY l.event_time) AS prev_value
    FROM device d
    JOIN "device-status-log" l ON d.uuid = l.device_id
    JOIN product p ON p.uuid = d.product_device_uuid
    WHERE l.code = 'presence_state'
        AND p.cat_name = 'hps'
),

-- Intervals when device was in 'presence' (between prev_time and event_time when value='none')
presence_intervals AS (
    SELECT
        space_id,
        prev_time AS start_time,
        event_time AS end_time
    FROM presence_logs
    WHERE value = 'none'
        AND prev_value = 'presence'
        AND prev_time IS NOT NULL
),

-- Split intervals across days
split_intervals AS (
    SELECT
        space_id,
        generate_series(
            date_trunc('day', start_time),
            date_trunc('day', end_time),
            interval '1 day'
        )::date AS event_date,
        GREATEST(start_time, date_trunc('day', start_time)) AS interval_start,
        LEAST(end_time, date_trunc('day', end_time) + interval '1 day') AS interval_end
    FROM presence_intervals
),

-- Mark and group overlapping intervals per space per day
ordered_intervals AS (
    SELECT
        space_id,
        event_date,
        interval_start,
        interval_end,
        LAG(interval_end) OVER (PARTITION BY space_id, event_date ORDER BY interval_start) AS prev_end
    FROM split_intervals
),

grouped_intervals AS (
    SELECT *,
        SUM(CASE
                WHEN prev_end IS NULL OR interval_start > prev_end THEN 1
                ELSE 0
            END) OVER (PARTITION BY space_id, event_date ORDER BY interval_start) AS grp
    FROM ordered_intervals
),

-- Merge overlapping intervals per group
merged_intervals AS (
    SELECT
        space_id,
        event_date,
        MIN(interval_start) AS merged_start,
        MAX(interval_end) AS merged_end
    FROM grouped_intervals
    GROUP BY space_id, event_date, grp
),

-- Sum durations of merged intervals
summed_intervals AS (
    SELECT
        space_id,
        event_date,
        SUM(EXTRACT(EPOCH FROM (merged_end - merged_start))) AS raw_occupied_seconds
    FROM merged_intervals
    GROUP BY space_id, event_date
)

-- Final output with capped seconds and percentage
SELECT
    space_id,
    event_date,
    LEAST(raw_occupied_seconds, 86400) AS occupied_seconds,
    ROUND(LEAST(raw_occupied_seconds, 86400) / 86400.0 * 100, 2) AS occupancy_percentage
FROM summed_intervals
ORDER BY space_id, event_date;
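For a quick feel of the final arithmetic in the updated query: the merged per-day interval seconds are capped at one full day and expressed as a share of 86400. Illustrative numbers only, not part of the change:

-- 9000 merged occupied seconds in a day:
SELECT LEAST(9000, 86400) AS occupied_seconds,
       ROUND(LEAST(9000, 86400) / 86400.0 * 100, 2) AS occupancy_percentage;  -- 9000, 10.42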
18
libs/common/src/util/calculate.aqi.ts
Normal file
@ -0,0 +1,18 @@
export function calculateAQI(pm2_5: number): number {
  const breakpoints = [
    { pmLow: 0.0, pmHigh: 12.0, aqiLow: 0, aqiHigh: 50 },
    { pmLow: 12.1, pmHigh: 35.4, aqiLow: 51, aqiHigh: 100 },
    { pmLow: 35.5, pmHigh: 55.4, aqiLow: 101, aqiHigh: 150 },
    { pmLow: 55.5, pmHigh: 150.4, aqiLow: 151, aqiHigh: 200 },
    { pmLow: 150.5, pmHigh: 250.4, aqiLow: 201, aqiHigh: 300 },
    { pmLow: 250.5, pmHigh: 500.4, aqiLow: 301, aqiHigh: 500 },
  ];

  const bp = breakpoints.find((b) => pm2_5 >= b.pmLow && pm2_5 <= b.pmHigh);
  if (!bp) return pm2_5 > 500.4 ? 500 : 0; // Handle out-of-range values

  return Math.round(
    ((bp.aqiHigh - bp.aqiLow) / (bp.pmHigh - bp.pmLow)) * (pm2_5 - bp.pmLow) +
      bp.aqiLow,
  );
}
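A short usage sketch for the function above (illustrative, not part of the change); the expected outputs follow directly from the breakpoint table:

// calculateAQI maps a PM2.5 concentration onto the standard 0-500 AQI scale.
const samples = [8.0, 35.4, 55.5, 600];
samples.forEach((pm) => console.log(pm, calculateAQI(pm))); // 33, 100, 151, 500
// Note: values that fall between breakpoint rows (e.g. 12.05) match no entry and return 0.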
11
libs/common/src/util/device-utils.ts
Normal file
@ -0,0 +1,11 @@
import { DeviceEntity } from '../modules/device/entities';

export function addSpaceUuidToDevices(
  devices: DeviceEntity[],
  spaceUuid: string,
): DeviceEntity[] {
  return devices.map((device) => {
    (device as any).spaceUuid = spaceUuid;
    return device;
  });
}
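If a non-mutating, fully typed variant were ever preferred, something like the sketch below would work. It is illustrative only and not part of this change; note that spreading produces a plain object, so any entity prototype methods would be lost, which is likely why the helper above mutates in place instead.

// Hypothetical typed alternative: returns new objects tagged with spaceUuid.
export function withSpaceUuid<T extends object>(
  items: T[],
  spaceUuid: string,
): Array<T & { spaceUuid: string }> {
  return items.map((item) => ({ ...item, spaceUuid }));
}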
8
libs/common/src/util/email/batch-email.interface.ts
Normal file
@ -0,0 +1,8 @@
export interface BatchEmailData {
  base: { from: { email: string }; template_uuid: string };
  requests: Array<{
    to: { email: string }[];
    template_variables: Record<string, any>;
  }>;
  isBatch: true;
}
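A literal conforming to this interface might look like the following; the addresses and template id are placeholders, not values from the codebase:

// Example payload shape for a batch send (placeholder data only).
const exampleBatch: BatchEmailData = {
  base: { from: { email: 'no-reply@example.com' }, template_uuid: 'TEMPLATE_UUID_PLACEHOLDER' },
  requests: [
    { to: [{ email: 'user1@example.com' }], template_variables: { name: 'User One' } },
    { to: [{ email: 'user2@example.com' }], template_variables: { name: 'User Two' } },
  ],
  isBatch: true,
};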
Some files were not shown because too many files have changed in this diff.