From 092f2ec0f32ce1748d62d35badc3aa27674e913f Mon Sep 17 00:00:00 2001
From: kosukesuenaga
Date: Tue, 25 Nov 2025 14:54:01 +0900
Subject: [PATCH 1/6] 20251125 save

---
 .gitignore                                    | 10 +++-
 functions/generate_minutes/index.ts           | 16 ++++++
 functions/generate_minutes/package.json       | 31 ++++++++++
 functions/generate_minutes/src/apiRouter.ts   | 35 ++++++++++++
 functions/generate_minutes/src/logics/ai.ts   | 38 +++++++++++++
 functions/generate_minutes/src/logics/date.ts | 25 ++++++++
 .../src/logics/googleDrive.ts                 | 36 ++++++++++++
 .../generate_minutes/src/logics/hubspot.ts    |  0
 .../generate_minutes/src/logics/process.ts    | 57 +++++++++++++++++++
 .../generate_minutes/src/logics/storage.ts    | 39 +++++++++++++
 functions/generate_minutes/tsconfig.json      | 13 +++++
 11 files changed, 299 insertions(+), 1 deletion(-)
 create mode 100644 functions/generate_minutes/index.ts
 create mode 100644 functions/generate_minutes/package.json
 create mode 100644 functions/generate_minutes/src/apiRouter.ts
 create mode 100644 functions/generate_minutes/src/logics/ai.ts
 create mode 100644 functions/generate_minutes/src/logics/date.ts
 create mode 100644 functions/generate_minutes/src/logics/googleDrive.ts
 create mode 100644 functions/generate_minutes/src/logics/hubspot.ts
 create mode 100644 functions/generate_minutes/src/logics/process.ts
 create mode 100644 functions/generate_minutes/src/logics/storage.ts
 create mode 100644 functions/generate_minutes/tsconfig.json

diff --git a/.gitignore b/.gitignore
index a9be4e2..2a32f03 100755
--- a/.gitignore
+++ b/.gitignore
@@ -11,4 +11,12 @@
 venv/
 __pycache__/
 *.csv
-request.json
\ No newline at end of file
+request.json
+
+node_modules/
+dist/
+.env_dev
+.env
+.env_prod
+credentials.json
+package-lock.json
\ No newline at end of file
diff --git a/functions/generate_minutes/index.ts b/functions/generate_minutes/index.ts
new file mode 100644
index 0000000..45ac857
--- /dev/null
+++ b/functions/generate_minutes/index.ts
@@ -0,0 +1,16 @@
+// src/index.ts
+import express from "express";
+import type { Express } from "express";
+import router from "./src/apiRouter";
+
+const app: Express = express();
+app.use("/api", router);
+
+export const helloHttp = app;
+// export const helloHttp = (req: Request, res: Response): void => {
+//   // console.log("Function invoked:", new Date().toISOString());
+//   console.log("path:", req.path, "method:", req.method);
+
+//   const name = (req.query.name as string) ?? "World";
+//   res.status(200).send(`Hello, ${name} from TypeScript Cloud Functions!`);
+// };
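Note on the entry point above: index.ts exports the Express app itself as the `helloHttp` function target, so the Functions Framework owns the HTTP listener. A minimal standalone sketch (not part of this patch) of serving an equivalent app directly, assuming the default port 8080:

```typescript
// Illustrative sketch only — not part of the patch.
// Runs an Express app directly instead of through functions-framework,
// which can be convenient for a quick local check.
import express from "express";
import type { Express } from "express";

const app: Express = express();
app.get("/api/hello", (_req, res) => res.send("こんにちは!"));

// Assumption: port 8080, matching the port used elsewhere in this patch.
const port = Number(process.env.PORT ?? 8080);
app.listen(port, () => console.log(`listening on http://localhost:${port}`));
```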
diff --git a/functions/generate_minutes/package.json b/functions/generate_minutes/package.json
new file mode 100644
index 0000000..7de412b
--- /dev/null
+++ b/functions/generate_minutes/package.json
@@ -0,0 +1,31 @@
+{
+  "name": "generate_minutes",
+  "version": "1.0.0",
+  "main": "dist/index.js",
+  "scripts": {
+    "build": "tsc",
+    "start": "npm run build && functions-framework --target=helloHttp --port=8080 --source=dist/index.js",
+    "dev": "dotenv -e .env_dev -- nodemon --watch . --exec \"functions-framework --target=helloHttp --port=8080\"",
+    "debug": "dotenv -e .env_dev -- node --inspect node_modules/.bin/functions-framework --source=dist/index.js --target=helloHttp",
+    "watch": "concurrently \"dotenv -e .env_dev -- npm run build -- --watch\" \"dotenv -e .env_dev -- nodemon --watch ./dist/ --exec npm run debug\""
+  },
+  "devDependencies": {
+    "@google-cloud/functions-framework": "^3.0.0",
+    "@types/express": "^4.17.0",
+    "@types/node": "^20.0.0",
+    "dotenv-cli": "^11.0.0",
+    "nodemon": "^3.1.11",
+    "ts-node": "^10.9.2",
+    "typescript": "^5.0.0"
+  },
+  "dependencies": {
+    "@google-cloud/local-auth": "^2.1.0",
+    "@google-cloud/storage": "^7.17.3",
+    "@google/genai": "^1.30.0",
+    "concurrently": "^9.2.1",
+    "dotenv": "^17.2.3",
+    "express": "^4.21.2",
+    "googleapis": "^105.0.0",
+    "zod": "^4.1.13"
+  }
+}
diff --git a/functions/generate_minutes/src/apiRouter.ts b/functions/generate_minutes/src/apiRouter.ts
new file mode 100644
index 0000000..cf8429c
--- /dev/null
+++ b/functions/generate_minutes/src/apiRouter.ts
@@ -0,0 +1,35 @@
+import express from "express";
+import { storageController } from "./logics/storage";
+import { MiiTelWebhookSchema, processRequest } from "./logics/process";
+
+const router = express.Router();
+
+router.get("/hello", (req, res) => res.send("こんにちは!"));
+
+router.post("/miitel", async(req, res) => {
+  const body = req.body;
+  // await storageController.saveToGCS("request_log",'test', JSON.stringify(req.body));
+
+  const parsedBody = MiiTelWebhookSchema.safeParse(body);
+  if(!parsedBody.success) {
+    console.error("Invalid webhook body:", parsedBody.error);
+    return;
+  }
+  console.log("miitel webhook received:", parsedBody.data.video.id);
+
+  await processRequest(parsedBody.data.video);
+
+  res.send("こんにちは!");
+});
+
+router.post("/getLog", async(req, res) => {
+  console.log(req.body);
+  const meetingId = req.body.meetingId;
+  const exist = await storageController.existsInGCS("request_log", "test.json.gz");
+  console.log("Log exists:", exist);
+  const log = await storageController.loadFromGCS("request_log", meetingId + ".json.gz");
+  console.log(log)
+  res.send(log);
+});
+
+export default router;
\ No newline at end of file
diff --git a/functions/generate_minutes/src/logics/ai.ts b/functions/generate_minutes/src/logics/ai.ts
new file mode 100644
index 0000000..ee37ec6
--- /dev/null
+++ b/functions/generate_minutes/src/logics/ai.ts
@@ -0,0 +1,38 @@
+import { GoogleGenAI } from "@google/genai";
+
+
+const aiClient = new GoogleGenAI({
+  apiKey: process.env.GEMINI_API_KEY,
+});
+
+export const aiController = {
+  generateMinutes: async(text: string) => {
+    const prompt = `
+    あなたは議事録作成のプロフェッショナルです。以下の「文字起こし結果」は営業マンが録音した商談の文字起こしです。以下の制約条件に従い、最高の商談報告の議事録を作成してください。
+
+    制約条件:
+    1. 文字起こし結果にはAIによる書き起こしミスがある可能性を考慮してください。
+    2. 冒頭に主要な「決定事項」と「アクションアイテム」をまとめてください。
+    3. 議論のポイントを議題ごとに要約してください。
+    4. 見出しや箇条書きを用いて、情報が探しやすい構造で簡潔かつ明瞭に記述してください。
+    5. 要約は500文字以内に収めてください。
+    6. 箇条書き形式で簡潔にまとめてください。
+    7. マークダウン記法は使わず、各項目を「■」や「・」等を使って見やすくしてください。
+
+    文字起こし結果:
+    ${text}
+    `
+
+    try {
+      const response = await aiClient.models.generateContent({
+        model: process.env.GEMINI_MODEL_ID || "gemini-2.5-flash",
+        contents: prompt,
+      })
+      console.log("AI Response:", response.text);
+      return response.text;
+    } catch (error) {
+      console.error("AI Generation Error:", error);
+      return null;
+    }
+  }
+};
\ No newline at end of file
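The /miitel handler in apiRouter.ts above validates the webhook body with MiiTelWebhookSchema before calling processRequest. A self-contained sketch (not from this patch) of driving such a route in-process; supertest is an assumed extra dev dependency, and express.json() is enabled here because the sketch app needs its own body parser:

```typescript
// Illustrative sketch only — not part of the patch.
// Exercises a webhook-style POST route in-process using supertest
// (assumed dev dependency, not listed in the package.json above).
import express from "express";
import request from "supertest";

const app = express();
app.use(express.json()); // the sketch app parses JSON bodies itself

app.post("/api/miitel", (req, res) => {
  // A real handler would validate req.body (e.g. with MiiTelWebhookSchema) first.
  console.log("received video id:", req.body?.video?.id);
  res.status(200).send("ok");
});

(async () => {
  const res = await request(app)
    .post("/api/miitel")
    .send({ video: { id: "v-123", title: "サンプル株式会社様 商談" } });
  console.log("status:", res.status, "body:", res.text);
})();
```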
diff --git a/functions/generate_minutes/src/logics/date.ts b/functions/generate_minutes/src/logics/date.ts
new file mode 100644
index 0000000..a7461c1
--- /dev/null
+++ b/functions/generate_minutes/src/logics/date.ts
@@ -0,0 +1,25 @@
+
+export const dateController = {
+  convertToJst: (date: string): Date => {
+    const utcDate = new Date(date);
+    const jstDate = utcDate.toLocaleString('ja-JP', { timeZone: 'Asia/Tokyo' })
+    return new Date(jstDate);
+  },
+  getFormattedDate: (date: Date, format: string): string => {
+    const symbol = {
+      M: date.getMonth() + 1,
+      d: date.getDate(),
+      h: date.getHours(),
+      m: date.getMinutes(),
+      s: date.getSeconds(),
+    };
+
+    const formatted = format.replace(/(M+|d+|h+|m+|s+)/g, (v) =>
+      ((v.length > 1 ? "0" : "") + symbol[v.slice(-1) as keyof typeof symbol]).slice(-2)
+    );
+
+    return formatted.replace(/(y+)/g, (v) =>
+      date.getFullYear().toString().slice(-v.length)
+    );
+  }
+};
\ No newline at end of file
diff --git a/functions/generate_minutes/src/logics/googleDrive.ts b/functions/generate_minutes/src/logics/googleDrive.ts
new file mode 100644
index 0000000..bf13ea6
--- /dev/null
+++ b/functions/generate_minutes/src/logics/googleDrive.ts
@@ -0,0 +1,36 @@
+import { authenticate } from "@google-cloud/local-auth";
+import { JSONClient } from "google-auth-library/build/src/auth/googleauth";
+import { google } from "googleapis";
+import path from "path";
+
+const SCOPES = ["https://www.googleapis.com/auth/drive", "https://www.googleapis.com/auth/drive.file"]
+const CREDENTIALS_PATH = path.join(process.cwd(), 'credentials.json');
+
+export const googleDriveController = {
+  getAuth: async():Promise => {
+    const auth = await new google.auth.GoogleAuth({
+      keyFile: CREDENTIALS_PATH,
+      scopes: SCOPES,
+    });
+    return auth;
+  },
+  checkConnection: async() => {
+    const auth = await googleDriveController.getAuth();
+    // console.log("Google Drive client authenticated.");
+    const drive = google.drive({ version: "v3", auth: auth});
+    const folder = '1cCDJKusfrlDrJe2yHCR8pCHJXRqX-4Hw';
+    const res = await drive.files.list({
+      q: `'${folder}' in parents`,
+      pageSize: 10,
+      fields: "files(id, name)",
+    });
+    console.log("Files:");
+    console.log(res.data.files);
+  },
+  uploadFile: async() => {
+
+  },
+  createNewFile: async() => {
+
+  },
+};
\ No newline at end of file
diff --git a/functions/generate_minutes/src/logics/hubspot.ts b/functions/generate_minutes/src/logics/hubspot.ts
new file mode 100644
index 0000000..e69de29
diff --git a/functions/generate_minutes/src/logics/process.ts b/functions/generate_minutes/src/logics/process.ts
new file mode 100644
index 0000000..b52d62a
--- /dev/null
+++ b/functions/generate_minutes/src/logics/process.ts
@@ -0,0 +1,57 @@
+import z from "zod";
+import { aiController } from "./ai";
+import { dateController } from "./date";
+import { googleDriveController } from "./googleDrive";
+
+const VideoInfoSchema = z.looseObject({
+  id: z.string(),
+  title: z.string(),
+  starts_at: z.string(),
+  ends_at: z.string(),
+  access_permission: z.string(),
+  host: z.object({
+    login_id: z.string(),
+    user_name: z.string(),
+  }),
+  speech_recognition: z.object({
+    raw: z.string(),
+  })
+});
+
+type VideoInfo = z.infer<typeof VideoInfoSchema>;
+
+export const MiiTelWebhookSchema = z.object({
+  video: VideoInfoSchema,
+});
+
+// export type MiiTelWebhook = z.infer<typeof MiiTelWebhookSchema>;
+
+export const processRequest = async(videoInfo: VideoInfo) => {
+  const videoId = videoInfo.id;
+  const title = videoInfo.title;
+  const startsAt = videoInfo.starts_at;
+  const endsAt = videoInfo.ends_at;
+  const accessPermission = videoInfo.access_permission;
+
+  const host_id = videoInfo.host.login_id;
+  const host_name = videoInfo.host.user_name;
+
+  const speechRecognition = videoInfo.speech_recognition.raw;
+
+  console.log(startsAt);
+  const jstStartsAt = dateController.convertToJst(startsAt);
+  const jstEndsAt = dateController.convertToJst(endsAt);
+
+  googleDriveController.checkConnection();
+  // console.log(dateController.getFormattedDate(startsAtJst, "yyyy/MM/dd hh:mm:ss"));
+  // console.log(endsAt);
+  // console.log("Processing video:", host_id, host_name, title);
+  if(accessPermission !== "EVERYONE" || !title.includes("様") || title.includes("社内")) return;
+
+
+
+  // Save Request Log to Google Drive
+  // const minute = await aiController.generateMinutes(speechRecognition);
+  // console.log(minute);
+
+  };
\ No newline at end of file
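processRequest above receives a payload validated by MiiTelWebhookSchema; because VideoInfoSchema uses z.looseObject, extra fields sent by MiiTel are kept rather than stripped. A standalone sketch of that validation behaviour, with a trimmed-down payload shape assumed for illustration:

```typescript
// Illustrative sketch only — not part of the patch.
// Shows safeParse with a loose object schema: unknown keys are preserved.
import { z } from "zod";

const WebhookSchema = z.object({
  video: z.looseObject({
    id: z.string(),
    title: z.string(),
  }),
});

const result = WebhookSchema.safeParse({
  video: { id: "v-123", title: "サンプル株式会社様 商談", extra: "kept by looseObject" },
});

if (result.success) {
  console.log(result.data.video.id); // "v-123"; `extra` is still present on result.data.video
} else {
  console.error(result.error);
}
```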
diff --git a/functions/generate_minutes/src/logics/storage.ts b/functions/generate_minutes/src/logics/storage.ts
new file mode 100644
index 0000000..31b14df
--- /dev/null
+++ b/functions/generate_minutes/src/logics/storage.ts
@@ -0,0 +1,39 @@
+import { Storage } from "@google-cloud/storage";
+import zlib from "zlib";
+
+const csClient = new Storage({
+    projectId: 'datacom-poc',
+}
+);
+const BUCKET_NAME = "meeting-report-data";
+const bucket = csClient.bucket(BUCKET_NAME);
+
+export const storageController = {
+  saveToGCS: async(folder: string, filename: string, text: string) => {
+    const gzipped = zlib.gzipSync(text);
+    const file = bucket.file((`${folder}/${filename}.json.gz`));
+    await file.save(gzipped, {
+      contentType: 'application/gzip',
+    })
+  },
+  loadFromGCS: async(folder: string, filename: string): Promise => {
+    const file = bucket.file(`${folder}/${filename}`);
+    // console.log("loading file:", file.name);
+    try {
+      const [data] = await file.download();
+      return zlib.gunzipSync(data).toString("utf-8");
+    } catch (err: any) {
+      return null;
+    }
+  },
+  existsInGCS: async(folder: string, filename: string): Promise => {
+    const file = bucket.file((`${folder}/${filename}`));
+    console.log("checking file:", file.name);
+    try {
+      const [exist] = await file.exists();
+      return exist;
+    } catch (err: any) {
+      return false;
+    }
+  },
+};
diff --git a/functions/generate_minutes/tsconfig.json b/functions/generate_minutes/tsconfig.json
new file mode 100644
index 0000000..1089c10
--- /dev/null
+++ b/functions/generate_minutes/tsconfig.json
@@ -0,0 +1,13 @@
+{
+  "compilerOptions": {
+    "target": "ES2020",
+    "module": "commonjs",
+    "outDir": "dist",
+    "strict": true,
+    "esModuleInterop": true,
+    "moduleResolution": "node",
+    "resolveJsonModule": true,
+    "skipLibCheck": true
+  },
+  // "include": ["", "index.ts"]
+}

From 395fba645d825f6a0595165c013c33d96e8272c1 Mon Sep 17 00:00:00 2001
From: kosukesuenaga
Date: Fri, 5 Dec 2025 14:12:11 +0900
Subject: [PATCH 2/6] python -> node.js

---
 .gitignore                                         |   1 +
 functions/append-log-to-sheet/.env_debug           |   5 -
 functions/append-log-to-sheet/.env_dev             |   5 -
 functions/append-log-to-sheet/.env_prod            |   5 -
 .../_scripts/deploy_dev.sh                         |  33 ---
 functions/append-log-to-sheet/source/main.py       | 267 ------------------
 .../source/requirements.txt                        |
5 - .../create-hubspot-meeting-log/.env_debug | 5 - functions/create-hubspot-meeting-log/.env_dev | 5 - .../create-hubspot-meeting-log/.env_prod | 5 - .../_scripts/deploy_dev.sh | 33 --- .../create-hubspot-meeting-log/source/main.py | 200 ------------- .../source/requirements.txt | 8 - functions/create-log-sheet/.env_debug | 5 - functions/create-log-sheet/.env_dev | 5 - functions/create-log-sheet/.env_prod | 5 - .../create-log-sheet/_scripts/deploy_dev.sh | 33 --- functions/create-log-sheet/source/main.py | 218 -------------- .../create-log-sheet/source/requirements.txt | 5 - functions/export-companies-to-gcs/.env_debug | 5 - functions/export-companies-to-gcs/.env_dev | 5 - functions/export-companies-to-gcs/.env_prod | 5 - .../export-companies-to-gcs/source/main.py | 87 ------ .../source/requirements.txt | 5 - functions/export-owners-to-gcs/.env_debug | 5 - functions/export-owners-to-gcs/.env_dev | 5 - functions/export-owners-to-gcs/.env_prod | 5 - functions/export-owners-to-gcs/source/main.py | 90 ------ .../source/requirements.txt | 5 - functions/generate-meeting-minutes/.env_debug | 3 - functions/generate-meeting-minutes/.env_dev | 3 - functions/generate-meeting-minutes/.env_prod | 3 - .../_scripts/deploy_dev.sh | 35 --- .../generate-meeting-minutes/source/main.py | 132 --------- .../source/requirements.txt | 5 - functions/generate_minutes/.DS_Store | Bin 0 -> 8196 bytes functions/generate_minutes/package.json | 6 +- functions/generate_minutes/serverConfig.ts | 24 ++ functions/generate_minutes/src/apiRouter.ts | 85 +++++- functions/generate_minutes/src/logics/date.ts | 9 +- .../generate_minutes/src/logics/error.ts | 29 ++ functions/generate_minutes/src/logics/file.ts | 53 ++++ .../generate_minutes/src/logics/fuzzyMatch.ts | 62 ++++ .../src/logics/googleDrive.ts | 234 +++++++++++++-- .../generate_minutes/src/logics/hubspot.ts | 116 ++++++++ .../generate_minutes/src/logics/process.ts | 131 +++++++-- .../generate_minutes/src/logics/storage.ts | 26 +- .../generate_minutes/src/stores/errorCodes.ts | 26 ++ .../.env_debug | 4 - .../.env_dev | 4 - .../.env_prod | 4 - .../_scripts/deploy_dev.sh | 33 --- .../source/main.py | 75 ----- .../source/requirements.txt | 4 - functions/upload-minutes-to-drive/.env_debug | 2 - functions/upload-minutes-to-drive/.env_dev | 2 - functions/upload-minutes-to-drive/.env_prod | 2 - .../_scripts/deploy_dev.sh | 33 --- .../upload-minutes-to-drive/source/main.py | 128 --------- .../source/requirements.txt | 5 - .../_scripts/deploy_dev.sh | 14 - workflows/workflow-create-minutes/main.yaml | 71 ----- 62 files changed, 726 insertions(+), 1702 deletions(-) delete mode 100755 functions/append-log-to-sheet/.env_debug delete mode 100755 functions/append-log-to-sheet/.env_dev delete mode 100755 functions/append-log-to-sheet/.env_prod delete mode 100755 functions/append-log-to-sheet/_scripts/deploy_dev.sh delete mode 100755 functions/append-log-to-sheet/source/main.py delete mode 100755 functions/append-log-to-sheet/source/requirements.txt delete mode 100755 functions/create-hubspot-meeting-log/.env_debug delete mode 100755 functions/create-hubspot-meeting-log/.env_dev delete mode 100755 functions/create-hubspot-meeting-log/.env_prod delete mode 100755 functions/create-hubspot-meeting-log/_scripts/deploy_dev.sh delete mode 100755 functions/create-hubspot-meeting-log/source/main.py delete mode 100755 functions/create-hubspot-meeting-log/source/requirements.txt delete mode 100755 functions/create-log-sheet/.env_debug delete mode 100755 functions/create-log-sheet/.env_dev delete 
mode 100755 functions/create-log-sheet/.env_prod delete mode 100755 functions/create-log-sheet/_scripts/deploy_dev.sh delete mode 100755 functions/create-log-sheet/source/main.py delete mode 100755 functions/create-log-sheet/source/requirements.txt delete mode 100755 functions/export-companies-to-gcs/.env_debug delete mode 100755 functions/export-companies-to-gcs/.env_dev delete mode 100755 functions/export-companies-to-gcs/.env_prod delete mode 100755 functions/export-companies-to-gcs/source/main.py delete mode 100755 functions/export-companies-to-gcs/source/requirements.txt delete mode 100755 functions/export-owners-to-gcs/.env_debug delete mode 100755 functions/export-owners-to-gcs/.env_dev delete mode 100755 functions/export-owners-to-gcs/.env_prod delete mode 100755 functions/export-owners-to-gcs/source/main.py delete mode 100755 functions/export-owners-to-gcs/source/requirements.txt delete mode 100755 functions/generate-meeting-minutes/.env_debug delete mode 100755 functions/generate-meeting-minutes/.env_dev delete mode 100755 functions/generate-meeting-minutes/.env_prod delete mode 100755 functions/generate-meeting-minutes/_scripts/deploy_dev.sh delete mode 100755 functions/generate-meeting-minutes/source/main.py delete mode 100755 functions/generate-meeting-minutes/source/requirements.txt create mode 100644 functions/generate_minutes/.DS_Store create mode 100644 functions/generate_minutes/serverConfig.ts create mode 100644 functions/generate_minutes/src/logics/error.ts create mode 100644 functions/generate_minutes/src/logics/file.ts create mode 100644 functions/generate_minutes/src/logics/fuzzyMatch.ts create mode 100644 functions/generate_minutes/src/stores/errorCodes.ts delete mode 100755 functions/trigger-minutes-workflow-from-miitel/.env_debug delete mode 100755 functions/trigger-minutes-workflow-from-miitel/.env_dev delete mode 100755 functions/trigger-minutes-workflow-from-miitel/.env_prod delete mode 100755 functions/trigger-minutes-workflow-from-miitel/_scripts/deploy_dev.sh delete mode 100755 functions/trigger-minutes-workflow-from-miitel/source/main.py delete mode 100755 functions/trigger-minutes-workflow-from-miitel/source/requirements.txt delete mode 100755 functions/upload-minutes-to-drive/.env_debug delete mode 100755 functions/upload-minutes-to-drive/.env_dev delete mode 100755 functions/upload-minutes-to-drive/.env_prod delete mode 100755 functions/upload-minutes-to-drive/_scripts/deploy_dev.sh delete mode 100755 functions/upload-minutes-to-drive/source/main.py delete mode 100755 functions/upload-minutes-to-drive/source/requirements.txt delete mode 100755 workflows/workflow-create-minutes/_scripts/deploy_dev.sh delete mode 100755 workflows/workflow-create-minutes/main.yaml diff --git a/.gitignore b/.gitignore index 2a32f03..cce9094 100755 --- a/.gitignore +++ b/.gitignore @@ -19,4 +19,5 @@ dist/ .env .env_prod credentials.json +credentials_dev.json package-lock.json \ No newline at end of file diff --git a/functions/append-log-to-sheet/.env_debug b/functions/append-log-to-sheet/.env_debug deleted file mode 100755 index a672143..0000000 --- a/functions/append-log-to-sheet/.env_debug +++ /dev/null @@ -1,5 +0,0 @@ -KEY_PATH=projects/32472615575/secrets/sa-access-google-drive-key -LOG_FOLDER_ID=1IZToaM9K9OJXrgV05aLO5k2ZCXpdlJzX -MEETING_FOLDER_ID=1cCDJKusfrlDrJe2yHCR8pCHJXRqX-4Hw -HUBSPOT_COMPANY_URL=https://app-na2.hubspot.com/contacts/242960467/record/0-2 -MODE=dev diff --git a/functions/append-log-to-sheet/.env_dev b/functions/append-log-to-sheet/.env_dev deleted 
file mode 100755 index b9a7bd3..0000000 --- a/functions/append-log-to-sheet/.env_dev +++ /dev/null @@ -1,5 +0,0 @@ -KEY_PATH: projects/32472615575/secrets/sa-access-google-drive-key -LOG_FOLDER_ID: 1IZToaM9K9OJXrgV05aLO5k2ZCXpdlJzX -MEETING_FOLDER_ID: 1cCDJKusfrlDrJe2yHCR8pCHJXRqX-4Hw -HUBSPOT_COMPANY_URL: https://app-na2.hubspot.com/contacts/242960467/record/0-2 -MODE: dev diff --git a/functions/append-log-to-sheet/.env_prod b/functions/append-log-to-sheet/.env_prod deleted file mode 100755 index d7650d0..0000000 --- a/functions/append-log-to-sheet/.env_prod +++ /dev/null @@ -1,5 +0,0 @@ -KEY_PATH: projects/570987459910/secrets/sa-create-minutes-key -LOG_FOLDER_ID: 1arL6AxpvA7N6Umg4wdrdAcRWBdKc-Jfb -MEETING_FOLDER_ID: 0AGT_1dSq66qYUk9PVA -HUBSPOT_COMPANY_URL: https://app.hubspot.com/contacts/22400567/record/0-2 -MODE: production diff --git a/functions/append-log-to-sheet/_scripts/deploy_dev.sh b/functions/append-log-to-sheet/_scripts/deploy_dev.sh deleted file mode 100755 index deee31d..0000000 --- a/functions/append-log-to-sheet/_scripts/deploy_dev.sh +++ /dev/null @@ -1,33 +0,0 @@ -#!/bin/bash - -# プロジェクトIDを設定 -PROJECT_ID="datacom-poc" - -# デプロイする関数名 -FUNCTION_NAME="mrt-append-log-to-sheet" - -# 関数のエントリポイント -ENTRY_POINT="handle_request" - -# ランタイム -RUNTIME="python312" - -# リージョン -REGION="asia-northeast1" - -# 環境変数ファイル -ENV_VARS_FILE=".env_dev" - -gcloud auth application-default set-quota-project $PROJECT_ID -gcloud config set project $PROJECT_ID - -# デプロイコマンド -gcloud functions deploy $FUNCTION_NAME \ - --gen2 \ - --region $REGION \ - --runtime $RUNTIME \ - --source=./source \ - --trigger-http \ - --no-allow-unauthenticated \ - --entry-point $ENTRY_POINT \ - --env-vars-file $ENV_VARS_FILE \ No newline at end of file diff --git a/functions/append-log-to-sheet/source/main.py b/functions/append-log-to-sheet/source/main.py deleted file mode 100755 index 33c5599..0000000 --- a/functions/append-log-to-sheet/source/main.py +++ /dev/null @@ -1,267 +0,0 @@ -import functions_framework -from google.cloud import secretmanager -from google.oauth2 import service_account -from googleapiclient.discovery import build -from googleapiclient.errors import HttpError -import json -import os -from datetime import datetime, timezone, timedelta - - -sm_client = secretmanager.SecretManagerServiceClient() - - -SCOPES = ["https://www.googleapis.com/auth/drive", "https://www.googleapis.com/auth/drive.file"] -HEADER_VALUES = ["タイムスタンプ","商談日", "タイトル", "登録先企業","担当者", "ミーティングURL", "議事録URL", "HubSpot会社概要URL"] - -@functions_framework.http -def handle_request(request): - # POSTリクエストの処理 - if request.method != 'POST': - return ('', 405, {'Allow': 'POST', 'Content-Type': 'application/json'}) # メソッドがPOSTでない場合は405エラーを返す - - """Shows basic usage of the Drive Activity API. - - Prints information about the last 10 events that occured the user's Drive. 
- """ - try: - request_json = request.get_json() - print(request_json) - title = request_json['title'] # 会議タイトル - document_id = request_json['document_id'] # 議事録ファイルのID - matched_company_id = request_json['matched_company_id'] # マッチした会社ID - matched_company_name = request_json['matched_company_name'] # マッチした会社名 - host_name = request_json['host_name'] # ホストユーザー名 - video_url = request_json['video_url'] # 会議履歴URL - starts_at = request_json['starts_at'] # 開始日時 - - log_folder_id = os.getenv("LOG_FOLDER_ID") # 共有ドライブID - meeting_folder_id = os.getenv("MEETING_FOLDER_ID") # ミーティングフォルダID - hubspot_company_url = os.getenv("HUBSPOT_COMPANY_URL") # HubSpotの会社情報URL - mode = os.getenv("MODE") # モード(devまたはprod) - - service_account_info = get_service_account_info() - # 認証 - credentials = get_credentials(service_account_info) - - # APIクライアントの構築 - drive_service = build("drive", "v3", credentials=credentials) - sheet_service = build("sheets", "v4", credentials=credentials) - - - # 現在日時をJSTに変換 - jst_now = datetime.now(timezone.utc).astimezone(timezone(timedelta(hours=9))) - # JSTの現在日時を文字列に変換 - ym_str = jst_now.strftime("%Y%m") - y_str = jst_now.strftime("%Y") - - - # 年別のフォルダを検索 - target_folder = get_directory_files_dev(drive_service, log_folder_id, y_str) if mode == "dev" else get_directory_files_prod(drive_service, meeting_folder_id, log_folder_id, y_str) - print("target_folder", target_folder) - - year_folder_id = None - if not target_folder: - # フォルダが存在しない場合は新規作成 - year_folder_id = create_new_folder(drive_service, log_folder_id, y_str) - else: - # フォルダが存在する場合はそのIDを使用 - year_folder_id = target_folder[0]['id'] - print("年別のフォルダID:", year_folder_id) - - # スプレッドシートを検索 - target_files = get_directory_files_dev(drive_service, year_folder_id, ym_str) if mode == "dev" else get_directory_files_prod(drive_service, meeting_folder_id, year_folder_id, ym_str) - print("スプレッドシート", target_files) - - if not target_files: - print('not found') - - # スプレッドシートを作成 - spreadsheet_id = create_new_spreadsheet(drive_service, year_folder_id, ym_str) - print("スプレッドシートID:", spreadsheet_id) - # 注意事項追加 - append_log_to_sheet(sheet_service, spreadsheet_id, ["※シート名変更厳禁"]) - # ヘッダーを追加 - append_log_to_sheet(sheet_service, spreadsheet_id, HEADER_VALUES) - - else: - print('found') - # ファイルIDを取得 - spreadsheet_id = target_files[0]['id'] - - documnet_url = f"https://docs.google.com/document/d/{document_id}/edit" if document_id else "" - hubspot_url = f"{hubspot_company_url}/{matched_company_id}" if matched_company_id else "" - # テストログを追加 - row_data = [jst_now.strftime("%Y-%m-%d %H:%M:%S"), - convert_to_jst_ymd(starts_at), - title, - matched_company_name, - host_name, - video_url, - documnet_url, - hubspot_url - ] - append_log_to_sheet(sheet_service, spreadsheet_id, row_data) - print("ログを追加しました:", row_data) - - return (json.dumps({"status": "success"}, ensure_ascii=False), 200, {"Content-Type": "application/json"}) - - except HttpError as error: - # TODO(developer) - Handleerrors from drive activity API. 
- print(f"An error occurred: {error}") - - -# -# SecretManagerから秘密鍵を取得 -# -def get_service_account_info(): - key_path = os.getenv('KEY_PATH') + "/versions/1" - # 秘密鍵取得 - response = sm_client.access_secret_version(name=key_path) - # 秘密鍵の値をデコード - secret_key = response.payload.data.decode("UTF-8") - return json.loads(secret_key) - -# Google Drive認証 -def get_credentials(service_account_info): - credentials = service_account.Credentials.from_service_account_info( - service_account_info, - scopes=SCOPES - ) - return credentials - - -# 開発用マイドライブからのファイルを取得 -def get_directory_files_dev(service,shared_folder_id, filename): - """ - 対象のディレクトリ配下からファイル名で検索した結果を配列で返す - :param filename: ファイル名 - :param directory_id: ディレクトリID - :param pages_max: 最大ページ探索数 - :return: ファイルリスト - """ - items = [] - page = 0 - pages_max = 10 # 最大ページ数 - while True: - page += 1 - if page == pages_max: - break - results = service.files().list( - corpora="user", - includeItemsFromAllDrives=True, - includeTeamDriveItems=True, - q=f"'{shared_folder_id}' in parents and name = '{filename}' and trashed = false", - supportsAllDrives=True, - pageSize=10, - fields="nextPageToken, files(id, name)").execute() - items += results.get("files", []) - - page_token = results.get('nextPageToken', None) - if page_token is None: - break - return items - -# 本番用共有ドライブからのファイルを取得 -def get_directory_files_prod(service,shared_folder_id,sub_folder_id,filename): - """ - 対象のディレクトリ配下からファイル名で検索した結果を配列で返す - :param filename: ファイル名 - :param directory_id: ディレクトリID - :param pages_max: 最大ページ探索数 - :return: ファイルリスト - """ - items = [] - page = 0 - pages_max = 10 # 最大ページ数 - while True: - page += 1 - if page == pages_max: - break - results = service.files().list( - corpora="drive", - driveId=shared_folder_id, - includeItemsFromAllDrives=True, - includeTeamDriveItems=True, - q=f"'{sub_folder_id}' in parents and name = '{filename}' and trashed = false", - supportsAllDrives=True, - pageSize=10, - fields="nextPageToken, files(id, name, parents)").execute() - items += results.get("files", []) - - page_token = results.get('nextPageToken', None) - if page_token is None: - break - return items - -def create_new_folder(service, sub_folder_id, title): - """ - Google Drive APIを使用して新しいフォルダを作成する - :param service: Google Drive APIのサービスオブジェクト - :param title: フォルダのタイトル - :return: 作成したフォルダのID - """ - file_metadata = { - "name": title, - "parents": [sub_folder_id], # 共有ドライブのIDを指定 - "mimeType": "application/vnd.google-apps.folder", - } - - result = service.files().create(body=file_metadata, fields="id", supportsAllDrives=True).execute() - return result.get('id') - - -def create_new_spreadsheet(service,folder_id,title): - """ - Google Sheets APIを使用して新しいスプレッドシートを作成する - :param service: Google Sheets APIのサービスオブジェクト - :param title: スプレッドシートのタイトル - :return: 作成したスプレッドシートのID - """ - file_metadata = { - 'name': title, - 'parents': [folder_id], # 作成したフォルダのIDを指定 - 'mimeType': 'application/vnd.google-apps.spreadsheet', - } - result = ( - service.files() - .create(body=file_metadata, fields="id", supportsAllDrives=True) - .execute() - ) - return result.get("id") - - -def append_log_to_sheet(service, spreadsheet_id, row_data): - """ - Google Sheets APIを使用してスプレッドシートにログを追加する - :param service: Google Sheets APIのサービスオブジェクト - :param spreadsheet_id: スプレッドシートのID - :param row_data: 追加するログデータ(リスト形式) - """ - body = { - 'values': [row_data] - } - - # スプレッドシートにログを追加 - result = service.spreadsheets().values().append( - spreadsheetId=spreadsheet_id, - range='Sheet1', - valueInputOption="USER_ENTERED", - 
insertDataOption='INSERT_ROWS', - body=body, - ).execute() - print(f"{result.get('updates').get('updatedCells')} cells appended.") - - - - -def convert_to_jst_ymd(starts_at): - """ - 開始日時をYYYY年MM月DD日形式に変換する - :param starts_at: 開始日時の文字列 - :return: YYYY年MM月DD日形式の文字列 - """ - # 開始日時をUTCからJSTに変換 - dt = datetime.fromisoformat(starts_at.replace("Z", "+00:00")).astimezone(timezone(timedelta(hours=9))) - # YYYY年MM月DD日形式に変換 - return dt.strftime("%Y年%m月%d日") \ No newline at end of file diff --git a/functions/append-log-to-sheet/source/requirements.txt b/functions/append-log-to-sheet/source/requirements.txt deleted file mode 100755 index e809a11..0000000 --- a/functions/append-log-to-sheet/source/requirements.txt +++ /dev/null @@ -1,5 +0,0 @@ -functions-framework==3.* -google-cloud-secret-manager -google-api-python-client -google-auth-httplib2 -google-auth-oauthlib \ No newline at end of file diff --git a/functions/create-hubspot-meeting-log/.env_debug b/functions/create-hubspot-meeting-log/.env_debug deleted file mode 100755 index 2cd13fd..0000000 --- a/functions/create-hubspot-meeting-log/.env_debug +++ /dev/null @@ -1,5 +0,0 @@ -PROJECT_ID=datacom-poc -LOCATION=asia-northeast1 -BUCKET=meeting-report-data -KEY_PATH=projects/32472615575/secrets/mrt-hubspot-accesstoken -MODE=dev \ No newline at end of file diff --git a/functions/create-hubspot-meeting-log/.env_dev b/functions/create-hubspot-meeting-log/.env_dev deleted file mode 100755 index eb8efeb..0000000 --- a/functions/create-hubspot-meeting-log/.env_dev +++ /dev/null @@ -1,5 +0,0 @@ -PROJECT_ID: datacom-poc -LOCATION: asia-northeast1 -BUCKET: meeting-report-data -KEY_PATH: projects/32472615575/secrets/mrt-hubspot-accesstoken -MODE: dev \ No newline at end of file diff --git a/functions/create-hubspot-meeting-log/.env_prod b/functions/create-hubspot-meeting-log/.env_prod deleted file mode 100755 index e23fb91..0000000 --- a/functions/create-hubspot-meeting-log/.env_prod +++ /dev/null @@ -1,5 +0,0 @@ -PROJECT_ID: rational-timing-443808-u0 -LOCATION: asia-northeast1 -BUCKET: meeting-data -KEY_PATH: projects/570987459910/secrets/mrt-hubspot-accesstoken -MODE: prod \ No newline at end of file diff --git a/functions/create-hubspot-meeting-log/_scripts/deploy_dev.sh b/functions/create-hubspot-meeting-log/_scripts/deploy_dev.sh deleted file mode 100755 index 02e2e95..0000000 --- a/functions/create-hubspot-meeting-log/_scripts/deploy_dev.sh +++ /dev/null @@ -1,33 +0,0 @@ -#!/bin/bash - -# プロジェクトIDを設定 -PROJECT_ID="datacom-poc" - -# デプロイする関数名 -FUNCTION_NAME="mrt-create-hubspot-meeting-log" - -# 関数のエントリポイント -ENTRY_POINT="handle_request" - -# ランタイム -RUNTIME="python312" - -# リージョン -REGION="asia-northeast1" - -# 環境変数ファイル -ENV_VARS_FILE=".env_dev" - -gcloud auth application-default set-quota-project $PROJECT_ID -gcloud config set project $PROJECT_ID - -# デプロイコマンド -gcloud functions deploy $FUNCTION_NAME \ - --gen2 \ - --region $REGION \ - --runtime $RUNTIME \ - --source=./source \ - --trigger-http \ - --no-allow-unauthenticated \ - --entry-point $ENTRY_POINT \ - --env-vars-file $ENV_VARS_FILE \ No newline at end of file diff --git a/functions/create-hubspot-meeting-log/source/main.py b/functions/create-hubspot-meeting-log/source/main.py deleted file mode 100755 index a6b204a..0000000 --- a/functions/create-hubspot-meeting-log/source/main.py +++ /dev/null @@ -1,200 +0,0 @@ -import functions_framework -from google.cloud import storage, secretmanager -import os -import hubspot -from hubspot.crm.objects.meetings import SimplePublicObjectInputForCreate, ApiException 
-import requests -import csv -import io -import re -import jaconv -from rapidfuzz import process, fuzz -import json - -CUTOFF = 80 # Fuzzy 閾値 (0-100) -LEGAL_SUFFIX = r'(株式会社|(株)|\(株\)|有限会社|合同会社|Inc\.?|Corp\.?|Co\.?Ltd\.?)' - -cs_client = storage.Client(project=os.getenv("PROJECT_ID")) -sm_client = secretmanager.SecretManagerServiceClient() - -@functions_framework.http -def handle_request(request): - try: - request_json = request.get_json() - print(request_json) - - mode = os.getenv("MODE") # モード(devまたはprod) - title = request_json['title'] - host_id = request_json['host_id'] if mode == 'prod' else 'ksuenaga@datacom.jp' # ホストユーザーID(開発環境では固定値を使用) - starts_at = request_json['starts_at'] - ends_at = request_json['ends_at'] - minutes = request_json['minutes'] - - # タイトルから【】を削除 - title = title.replace("【", "").replace("】", "") - # タイトルから企業名を抽出 - company_name = title.split("様")[0].strip() # "様" で分割して企業名を取得 - print("抽出した企業名:", company_name) - - # 会社名から会社IDを取得 - matched_company_id, matched_company_name = search_company(company_name) - - # マッチしたときだけ処理を行う - if matched_company_id: - # ユーザーIDを取得 - by_email = load_owners() - user_id = None - if host_id in by_email: - user_id = by_email[host_id]['id'] - print("取得したユーザーID:", user_id) - - # 改行コードを
タグに変換 - minutes_html = minutes.replace("\n", "
") - # ミーティングログを作成 - create_meeting_log(matched_company_id, title, user_id, starts_at, ends_at, minutes_html) - - - response_data = { - "matched_company_id": matched_company_id, # マッチした会社ID - "matched_company_name": matched_company_name, # マッチした会社名 - } - return (json.dumps(response_data, ensure_ascii=False), 200, {"Content-Type": "application/json"}) - except ApiException as e: - print("Exception when calling basic_api->create: %s\n" % e) - - -def normalize(name: str) -> str: - """表記ゆれ吸収用の正規化""" - n = jaconv.z2h(name, kana=False, digit=True, ascii=True).lower() - n = re.sub(LEGAL_SUFFIX, '', n) - return re.sub(r'[\s\-・・,,、\.]', '', n) - - -# GCSから会社一覧取得 -def load_componies(): - """ - 毎回 Cloud Storage から CSV を読み込む。 - *応答速度を気にしない* 前提なのでキャッシュしなくても OK。 - """ - - blob = cs_client.bucket(os.getenv("BUCKET")).blob('master/mst_company.csv') - raw = blob.download_as_bytes() # bytes - - recs, by_norm = [], {} - with io.StringIO(raw.decode("utf-8")) as f: - reader = csv.DictReader(f) - for row in reader: - row["norm_name"] = normalize(row["company_name"]) - recs.append(row) - by_norm[row["norm_name"]] = row # 完全一致用ハッシュ - - return recs, by_norm # (list[dict], dict) - - -# GCSから担当者一覧取得 -def load_owners(): - """ - GCS から担当者一覧 CSV を読み込み、 - email -> row 辞書 のマッピングを返す - """ - - blob = cs_client.bucket(os.getenv("BUCKET")).blob('master/mst_owner.csv') - raw = blob.download_as_bytes() # bytes - - by_email = {} - with io.StringIO(raw.decode("utf-8")) as f: - reader = csv.DictReader(f) - for row in reader: - # row に "email" と "user_id" フィールドがある前提 - email = row["email"].strip().lower() - by_email[email] = row - - return by_email - - - -def fuzzy_candidates(norm: str, recs): - """ - norm : 正規化済み検索語 - recs : 会社レコード list[dict] (norm_name 含む) - 戻り値 : list[(score:int, idx:int)] - """ - top = 2 # 上位 2 件を取得 - matches = process.extract( - norm, - [r["norm_name"] for r in recs], - scorer=fuzz.WRatio, - score_cutoff=CUTOFF, - limit=top - ) - print("ファジーマッチ結果:", matches) - if len(matches) == 0: - return None # マッチなしの場合は None を返す - elif len(matches) == 1: - return recs[matches[0][2]] # 上位 1 件のみの場合はそのレコードを返す - else: - if(matches[0][1] == matches[1][1]): - return None # 上位 2 件のスコアが同じ場合は None を返す - return recs[matches[0][2]] # 上位 1 件のみの場合はそのレコードを返す - - -def search_company(company_name): - # -------------------- マスタ読み込み -------------------- - recs, by_norm = load_componies() - norm_company_name = normalize(company_name) - print("正規化した企業名:", norm_company_name) - - matched_company_id = None - matched_company_name = None - # -------------------- 完全一致 -------------------- - if norm_company_name in by_norm: - matched_company_id = by_norm[norm_company_name]["company_id"] - matched_company_name = by_norm[norm_company_name]["company_name"] - - # -------------------- ファジーマッチ複数 -------------------- - else : - result = fuzzy_candidates(norm_company_name, recs) - if result: - matched_company_id = result["company_id"] - matched_company_name = result["company_name"] - - print("マッチした会社ID:", matched_company_id) - print("マッチした会社名:", matched_company_name) - return matched_company_id, matched_company_name - - -def create_meeting_log(company_id ,title, user_id, starts_at, ends_at, minutes): - """ - HubSpot API を使ってミーティングログを作成する。 - """ - access_key = get_access_key() # Secret Manager からアクセストークンを取得 - hs_client = hubspot.Client.create(access_token=access_key) - - properties = { - "hs_timestamp": starts_at, - "hs_meeting_title": title, - "hubspot_owner_id": user_id, - "hs_meeting_body": minutes, - "hs_meeting_start_time": starts_at, - 
"hs_meeting_end_time": ends_at, - - } - - simple_public_object_input_for_create = SimplePublicObjectInputForCreate( - associations=[{"types":[{"associationCategory":"HUBSPOT_DEFINED","associationTypeId":188}],"to":{"id":company_id}}], - properties=properties - ) - - api_response = hs_client.crm.objects.meetings.basic_api.create(simple_public_object_input_for_create=simple_public_object_input_for_create) - print(api_response) - -# -# SecretManagerからアクセストークンを取得 -# -def get_access_key(): - key_path = os.getenv('KEY_PATH') + "/versions/1" - # アクセストークン取得 - response = sm_client.access_secret_version(name=key_path) - # アクセストークンをデコード - access_token = response.payload.data.decode("UTF-8") - return access_token diff --git a/functions/create-hubspot-meeting-log/source/requirements.txt b/functions/create-hubspot-meeting-log/source/requirements.txt deleted file mode 100755 index db94e68..0000000 --- a/functions/create-hubspot-meeting-log/source/requirements.txt +++ /dev/null @@ -1,8 +0,0 @@ -functions-framework==3.* -Flask -google-cloud-storage -google-cloud-workflows -google-cloud-secret-manager -hubspot-api-client -rapidfuzz -jaconv \ No newline at end of file diff --git a/functions/create-log-sheet/.env_debug b/functions/create-log-sheet/.env_debug deleted file mode 100755 index a672143..0000000 --- a/functions/create-log-sheet/.env_debug +++ /dev/null @@ -1,5 +0,0 @@ -KEY_PATH=projects/32472615575/secrets/sa-access-google-drive-key -LOG_FOLDER_ID=1IZToaM9K9OJXrgV05aLO5k2ZCXpdlJzX -MEETING_FOLDER_ID=1cCDJKusfrlDrJe2yHCR8pCHJXRqX-4Hw -HUBSPOT_COMPANY_URL=https://app-na2.hubspot.com/contacts/242960467/record/0-2 -MODE=dev diff --git a/functions/create-log-sheet/.env_dev b/functions/create-log-sheet/.env_dev deleted file mode 100755 index b9a7bd3..0000000 --- a/functions/create-log-sheet/.env_dev +++ /dev/null @@ -1,5 +0,0 @@ -KEY_PATH: projects/32472615575/secrets/sa-access-google-drive-key -LOG_FOLDER_ID: 1IZToaM9K9OJXrgV05aLO5k2ZCXpdlJzX -MEETING_FOLDER_ID: 1cCDJKusfrlDrJe2yHCR8pCHJXRqX-4Hw -HUBSPOT_COMPANY_URL: https://app-na2.hubspot.com/contacts/242960467/record/0-2 -MODE: dev diff --git a/functions/create-log-sheet/.env_prod b/functions/create-log-sheet/.env_prod deleted file mode 100755 index d7650d0..0000000 --- a/functions/create-log-sheet/.env_prod +++ /dev/null @@ -1,5 +0,0 @@ -KEY_PATH: projects/570987459910/secrets/sa-create-minutes-key -LOG_FOLDER_ID: 1arL6AxpvA7N6Umg4wdrdAcRWBdKc-Jfb -MEETING_FOLDER_ID: 0AGT_1dSq66qYUk9PVA -HUBSPOT_COMPANY_URL: https://app.hubspot.com/contacts/22400567/record/0-2 -MODE: production diff --git a/functions/create-log-sheet/_scripts/deploy_dev.sh b/functions/create-log-sheet/_scripts/deploy_dev.sh deleted file mode 100755 index d9c2d22..0000000 --- a/functions/create-log-sheet/_scripts/deploy_dev.sh +++ /dev/null @@ -1,33 +0,0 @@ -#!/bin/bash - -# プロジェクトIDを設定 -PROJECT_ID="datacom-poc" - -# デプロイする関数名 -FUNCTION_NAME="mrt-create-log-sheet" - -# 関数のエントリポイント -ENTRY_POINT="handle_request" - -# ランタイム -RUNTIME="python312" - -# リージョン -REGION="asia-northeast1" - -# 環境変数ファイル -ENV_VARS_FILE=".env_dev" - -gcloud auth application-default set-quota-project $PROJECT_ID -gcloud config set project $PROJECT_ID - -# デプロイコマンド -gcloud functions deploy $FUNCTION_NAME \ - --gen2 \ - --region $REGION \ - --runtime $RUNTIME \ - --source=./source \ - --trigger-http \ - --no-allow-unauthenticated \ - --entry-point $ENTRY_POINT \ - --env-vars-file $ENV_VARS_FILE \ No newline at end of file diff --git a/functions/create-log-sheet/source/main.py 
b/functions/create-log-sheet/source/main.py deleted file mode 100755 index 53b192e..0000000 --- a/functions/create-log-sheet/source/main.py +++ /dev/null @@ -1,218 +0,0 @@ -import functions_framework -from google.cloud import secretmanager -from google.oauth2 import service_account -from googleapiclient.discovery import build -from googleapiclient.errors import HttpError -import json -import os -from datetime import datetime, timezone, timedelta - - -sm_client = secretmanager.SecretManagerServiceClient() - - -SCOPES = ["https://www.googleapis.com/auth/drive", "https://www.googleapis.com/auth/drive.file"] -HEADER_VALUES = ["タイムスタンプ","商談日", "タイトル", "登録先企業","担当者", "ミーティングURL", "議事録URL", "HubSpot会社概要URL"] - -@functions_framework.http -def handle_request(request): - # POSTリクエストの処理 - if request.method != 'POST': - return ('', 405, {'Allow': 'POST', 'Content-Type': 'application/json'}) # メソッドがPOSTでない場合は405エラーを返す - - """Shows basic usage of the Drive Activity API. - - Prints information about the last 10 events that occured the user's Drive. - """ - try: - log_folder_id = os.getenv("LOG_FOLDER_ID") # 共有ドライブID - meeting_folder_id = os.getenv("MEETING_FOLDER_ID") # ミーティングフォルダID - mode = os.getenv("MODE") # モード(devまたはprod) - - service_account_info = get_service_account_info() - # 認証 - credentials = get_credentials(service_account_info) - - # APIクライアントの構築 - drive_service = build("drive", "v3", credentials=credentials) - sheet_service = build("sheets", "v4", credentials=credentials) - - - # 現在日時をJSTに変換 - jst_now = datetime.now(timezone.utc).astimezone(timezone(timedelta(hours=9))) - # JSTの現在日時を文字列に変換 - ym_str = jst_now.strftime("%Y%m") - y_str = jst_now.strftime("%Y") - - - # 年別のフォルダを検索 - target_folder = get_directory_files_dev(drive_service, log_folder_id, y_str) if mode == "dev" else get_directory_files_prod(drive_service, meeting_folder_id, log_folder_id, y_str) - print("target_folder", target_folder) - - year_folder_id = None - if not target_folder: - # フォルダが存在しない場合は新規作成 - year_folder_id = create_new_folder(drive_service, log_folder_id, y_str) - else: - # フォルダが存在する場合はそのIDを使用 - year_folder_id = target_folder[0]['id'] - print("年別のフォルダID:", year_folder_id) - - # スプレッドシートを作成 - spreadsheet_id = create_new_spreadsheet(drive_service, year_folder_id, ym_str) - print("スプレッドシートID:", spreadsheet_id) - # 注意事項追加 - append_log_to_sheet(sheet_service, spreadsheet_id, ["※シート名変更厳禁"]) - # ヘッダーを追加 - append_log_to_sheet(sheet_service, spreadsheet_id, HEADER_VALUES) - - - - return (json.dumps({"status": "success"}, ensure_ascii=False), 200, {"Content-Type": "application/json"}) - - except HttpError as error: - # TODO(developer) - Handleerrors from drive activity API. 
- print(f"An error occurred: {error}") - - -# -# SecretManagerから秘密鍵を取得 -# -def get_service_account_info(): - key_path = os.getenv('KEY_PATH') + "/versions/1" - # 秘密鍵取得 - response = sm_client.access_secret_version(name=key_path) - # 秘密鍵の値をデコード - secret_key = response.payload.data.decode("UTF-8") - return json.loads(secret_key) - -# Google Drive認証 -def get_credentials(service_account_info): - credentials = service_account.Credentials.from_service_account_info( - service_account_info, - scopes=SCOPES - ) - return credentials - - -# 開発用マイドライブからのファイルを取得 -def get_directory_files_dev(service,shared_folder_id, filename): - """ - 対象のディレクトリ配下からファイル名で検索した結果を配列で返す - :param filename: ファイル名 - :param directory_id: ディレクトリID - :param pages_max: 最大ページ探索数 - :return: ファイルリスト - """ - items = [] - page = 0 - pages_max = 10 # 最大ページ数 - while True: - page += 1 - if page == pages_max: - break - results = service.files().list( - corpora="user", - includeItemsFromAllDrives=True, - includeTeamDriveItems=True, - q=f"'{shared_folder_id}' in parents and name = '{filename}' and trashed = false", - supportsAllDrives=True, - pageSize=10, - fields="nextPageToken, files(id, name)").execute() - items += results.get("files", []) - - page_token = results.get('nextPageToken', None) - if page_token is None: - break - return items - -# 本番用共有ドライブからのファイルを取得 -def get_directory_files_prod(service,shared_folder_id,sub_folder_id,filename): - """ - 対象のディレクトリ配下からファイル名で検索した結果を配列で返す - :param filename: ファイル名 - :param directory_id: ディレクトリID - :param pages_max: 最大ページ探索数 - :return: ファイルリスト - """ - items = [] - page = 0 - pages_max = 10 # 最大ページ数 - while True: - page += 1 - if page == pages_max: - break - results = service.files().list( - corpora="drive", - driveId=shared_folder_id, - includeItemsFromAllDrives=True, - includeTeamDriveItems=True, - q=f"'{sub_folder_id}' in parents and name = '{filename}' and trashed = false", - supportsAllDrives=True, - pageSize=10, - fields="nextPageToken, files(id, name, parents)").execute() - items += results.get("files", []) - - page_token = results.get('nextPageToken', None) - if page_token is None: - break - return items - -def create_new_folder(service, sub_folder_id, title): - """ - Google Drive APIを使用して新しいフォルダを作成する - :param service: Google Drive APIのサービスオブジェクト - :param title: フォルダのタイトル - :return: 作成したフォルダのID - """ - file_metadata = { - "name": title, - "parents": [sub_folder_id], # 共有ドライブのIDを指定 - "mimeType": "application/vnd.google-apps.folder", - } - - result = service.files().create(body=file_metadata, fields="id", supportsAllDrives=True).execute() - return result.get('id') - - -def create_new_spreadsheet(service,folder_id,title): - """ - Google Sheets APIを使用して新しいスプレッドシートを作成する - :param service: Google Sheets APIのサービスオブジェクト - :param title: スプレッドシートのタイトル - :return: 作成したスプレッドシートのID - """ - file_metadata = { - 'name': title, - 'parents': [folder_id], # 作成したフォルダのIDを指定 - 'mimeType': 'application/vnd.google-apps.spreadsheet', - } - result = ( - service.files() - .create(body=file_metadata, fields="id", supportsAllDrives=True) - .execute() - ) - return result.get("id") - - -def append_log_to_sheet(service, spreadsheet_id, row_data): - """ - Google Sheets APIを使用してスプレッドシートにログを追加する - :param service: Google Sheets APIのサービスオブジェクト - :param spreadsheet_id: スプレッドシートのID - :param row_data: 追加するログデータ(リスト形式) - """ - body = { - 'values': [row_data] - } - - # スプレッドシートにログを追加 - result = service.spreadsheets().values().append( - spreadsheetId=spreadsheet_id, - range='Sheet1', - valueInputOption="USER_ENTERED", - 
insertDataOption='INSERT_ROWS', - body=body, - ).execute() - print(f"{result.get('updates').get('updatedCells')} cells appended.") - diff --git a/functions/create-log-sheet/source/requirements.txt b/functions/create-log-sheet/source/requirements.txt deleted file mode 100755 index e809a11..0000000 --- a/functions/create-log-sheet/source/requirements.txt +++ /dev/null @@ -1,5 +0,0 @@ -functions-framework==3.* -google-cloud-secret-manager -google-api-python-client -google-auth-httplib2 -google-auth-oauthlib \ No newline at end of file diff --git a/functions/export-companies-to-gcs/.env_debug b/functions/export-companies-to-gcs/.env_debug deleted file mode 100755 index 2da85c5..0000000 --- a/functions/export-companies-to-gcs/.env_debug +++ /dev/null @@ -1,5 +0,0 @@ -PROJECT_ID=datacom-poc -LOCATION=asia-northeast1 -BUCKET=meeting-report-data -OBJECT=master/mst_company.csv -KEY_PATH=projects/32472615575/secrets/mrt-hubspot-accesstoken \ No newline at end of file diff --git a/functions/export-companies-to-gcs/.env_dev b/functions/export-companies-to-gcs/.env_dev deleted file mode 100755 index 93c6a3e..0000000 --- a/functions/export-companies-to-gcs/.env_dev +++ /dev/null @@ -1,5 +0,0 @@ -PROJECT_ID: datacom-poc -LOCATION: asia-northeast1 -BUCKET: meeting-report-data -OBJECT: master/mst_company.csv -KEY_PATH: projects/32472615575/secrets/mrt-hubspot-accesstoken diff --git a/functions/export-companies-to-gcs/.env_prod b/functions/export-companies-to-gcs/.env_prod deleted file mode 100755 index 0ad8e08..0000000 --- a/functions/export-companies-to-gcs/.env_prod +++ /dev/null @@ -1,5 +0,0 @@ -PROJECT_ID: rational-timing-443808-u0 -LOCATION: asia-northeast1 -BUCKET: meeting-data -OBJECT: master/mst_company.csv -KEY_PATH: projects/570987459910/secrets/mrt-hubspot-accesstoken diff --git a/functions/export-companies-to-gcs/source/main.py b/functions/export-companies-to-gcs/source/main.py deleted file mode 100755 index cb434c1..0000000 --- a/functions/export-companies-to-gcs/source/main.py +++ /dev/null @@ -1,87 +0,0 @@ -import functions_framework -from google.cloud import storage, secretmanager -import os -import hubspot -from hubspot.crm.objects.meetings import ApiException -import csv -import io -import json - -cs_client = storage.Client() -sm_client = secretmanager.SecretManagerServiceClient() - -@functions_framework.http -def handle_request(request): - try: - # 会社一覧取得 - companies = fetch_all_companies() - # メモリ上で CSV を生成 - csv_buffer = io.StringIO() - writer = csv.writer(csv_buffer) - # ヘッダー行 - writer.writerow(["company_id", "company_name"]) - # 各行を書き込み - for row in companies: - company_id = row['properties']['hs_object_id'] - company_name = row['properties']['name'] - writer.writerow([company_id, company_name]) - - # Cloud Storage にアップロード - upload_to_gcs(csv_buffer) - return 'success', 200 - except ApiException as e: - print("Exception when calling basic_api->create: %s\n" % e) - return (json.dumps("", ensure_ascii=False), 500, {"Content-Type": "application/json"}) - -def fetch_all_companies(): - """ - Companies API の get_page をページネーション付きで呼び出し、 - 全オブジェクトをリストで返す。 - """ - access_key = get_access_key() # Secret Manager からアクセストークンを取得 - hs_client = hubspot.Client.create(access_token=access_key) - - all_companies = [] - after = None - limit = 100 # 1 回あたりの取得件数(最大 100) - - while True: - # get_page の基本呼び出し - response = hs_client.crm.companies.basic_api.get_page( - limit=limit, - archived=False, - after=after - ) - - # レスポンスから companies の配列を追加 - if response.results: - all_companies.extend([c.to_dict() for c 
in response.results]) - - # 次ページがない場合はループ終了 - paging = response.paging - if not paging or not paging.next or not paging.next.after: - break - - # next.after をセットして次ループへ - after = paging.next.after - - return all_companies - -def upload_to_gcs(data): - """ - メモリ上の CSV データを Cloud Storage にアップロード - """ - bucket = cs_client.bucket(os.getenv("BUCKET")) - blob = bucket.blob(os.getenv("OBJECT")) - blob.upload_from_string(data.getvalue(), content_type='text/csv') - -# -# SecretManagerからアクセストークンを取得 -# -def get_access_key(): - key_path = os.getenv('KEY_PATH') + "/versions/1" - # アクセストークン取得 - response = sm_client.access_secret_version(name=key_path) - # アクセストークンをデコード - access_token = response.payload.data.decode("UTF-8") - return access_token diff --git a/functions/export-companies-to-gcs/source/requirements.txt b/functions/export-companies-to-gcs/source/requirements.txt deleted file mode 100755 index 60c7aa0..0000000 --- a/functions/export-companies-to-gcs/source/requirements.txt +++ /dev/null @@ -1,5 +0,0 @@ -functions-framework==3.* -Flask -google-cloud-storage -google-cloud-secret-manager -hubspot-api-client \ No newline at end of file diff --git a/functions/export-owners-to-gcs/.env_debug b/functions/export-owners-to-gcs/.env_debug deleted file mode 100755 index d59e97c..0000000 --- a/functions/export-owners-to-gcs/.env_debug +++ /dev/null @@ -1,5 +0,0 @@ -PROJECT_ID=datacom-poc -LOCATION=asia-northeast1 -BUCKET=meeting-report-data -OBJECT=master/mst_owner.csv -KEY_PATH=projects/32472615575/secrets/mrt-hubspot-accesstoken \ No newline at end of file diff --git a/functions/export-owners-to-gcs/.env_dev b/functions/export-owners-to-gcs/.env_dev deleted file mode 100755 index 624c47e..0000000 --- a/functions/export-owners-to-gcs/.env_dev +++ /dev/null @@ -1,5 +0,0 @@ -PROJECT_ID: datacom-poc -LOCATION: asia-northeast1 -BUCKET: meeting-report-data -OBJECT: master/mst_owner.csv -KEY_PATH: projects/32472615575/secrets/mrt-hubspot-accesstoken \ No newline at end of file diff --git a/functions/export-owners-to-gcs/.env_prod b/functions/export-owners-to-gcs/.env_prod deleted file mode 100755 index 470780f..0000000 --- a/functions/export-owners-to-gcs/.env_prod +++ /dev/null @@ -1,5 +0,0 @@ -PROJECT_ID: rational-timing-443808-u0 -LOCATION: asia-northeast1 -BUCKET: meeting-data -OBJECT: master/mst_owner.csv -KEY_PATH: projects/570987459910/secrets/mrt-hubspot-accesstoken diff --git a/functions/export-owners-to-gcs/source/main.py b/functions/export-owners-to-gcs/source/main.py deleted file mode 100755 index 9d822b5..0000000 --- a/functions/export-owners-to-gcs/source/main.py +++ /dev/null @@ -1,90 +0,0 @@ -import functions_framework -from google.cloud import storage, secretmanager -import os -import hubspot -from hubspot.crm.objects.meetings import ApiException -import csv -import io -import json - -cs_client = storage.Client() -sm_client = secretmanager.SecretManagerServiceClient() - - -@functions_framework.http -def handle_request(request): - try: - # 会社一覧取得 - owners = fetch_all_owners() - # メモリ上で CSV を生成 - csv_buffer = io.StringIO() - writer = csv.writer(csv_buffer) - # ヘッダー行 - writer.writerow(["id", "email"]) - # 各行を書き込み - for row in owners: - user_id = row['id'] - email = row['email'] - writer.writerow([user_id, email]) - - # Cloud Storage にアップロード - upload_to_gcs(csv_buffer) - return (json.dumps('', ensure_ascii=False), 200, {"Content-Type": "application/json"}) - except ApiException as e: - print("Exception when calling basic_api->create: %s\n" % e) - - - return (json.dumps("", ensure_ascii=False), 
200, {"Content-Type": "application/json"}) - -def fetch_all_owners(): - """ - Companies API の get_page をページネーション付きで呼び出し、 - 全オブジェクトをリストで返す。 - """ - access_key = get_access_key() # Secret Manager からアクセストークンを取得 - hs_client = hubspot.Client.create(access_token=access_key) - - all_owners = [] - after = None - limit = 100 # 1 回あたりの取得件数(最大 100) - - while True: - # get_page の基本呼び出し - response = hs_client.crm.owners.owners_api.get_page( - limit=limit, - archived=False, - after=after - ) - - # レスポンスから companies の配列を追加 - if response.results: - all_owners.extend([c.to_dict() for c in response.results]) - - # 次ページがない場合はループ終了 - paging = response.paging - if not paging or not paging.next or not paging.next.after: - break - - # next.after をセットして次ループへ - after = paging.next.after - - return all_owners - -def upload_to_gcs(data): - """ - メモリ上の CSV データを Cloud Storage にアップロード - """ - bucket = cs_client.bucket(os.getenv("BUCKET")) - blob = bucket.blob(os.getenv("OBJECT")) - blob.upload_from_string(data.getvalue(), content_type='text/csv') - -# -# SecretManagerからアクセストークンを取得 -# -def get_access_key(): - key_path = os.getenv('KEY_PATH') + "/versions/1" - # アクセストークン取得 - response = sm_client.access_secret_version(name=key_path) - # アクセストークンをデコード - access_token = response.payload.data.decode("UTF-8") - return access_token diff --git a/functions/export-owners-to-gcs/source/requirements.txt b/functions/export-owners-to-gcs/source/requirements.txt deleted file mode 100755 index 60c7aa0..0000000 --- a/functions/export-owners-to-gcs/source/requirements.txt +++ /dev/null @@ -1,5 +0,0 @@ -functions-framework==3.* -Flask -google-cloud-storage -google-cloud-secret-manager -hubspot-api-client \ No newline at end of file diff --git a/functions/generate-meeting-minutes/.env_debug b/functions/generate-meeting-minutes/.env_debug deleted file mode 100755 index 15e4399..0000000 --- a/functions/generate-meeting-minutes/.env_debug +++ /dev/null @@ -1,3 +0,0 @@ -MIITEL_URL=https://datacom.miitel.jp/ -PROJECT_ID=datacom-poc -MODEL_ID=gemini-2.5-flash diff --git a/functions/generate-meeting-minutes/.env_dev b/functions/generate-meeting-minutes/.env_dev deleted file mode 100755 index 0335b9f..0000000 --- a/functions/generate-meeting-minutes/.env_dev +++ /dev/null @@ -1,3 +0,0 @@ -MIITEL_URL: https://datacom.miitel.jp/ -PROJECT_ID: datacom-poc -MODEL_ID: gemini-2.5-flash \ No newline at end of file diff --git a/functions/generate-meeting-minutes/.env_prod b/functions/generate-meeting-minutes/.env_prod deleted file mode 100755 index 88be3ae..0000000 --- a/functions/generate-meeting-minutes/.env_prod +++ /dev/null @@ -1,3 +0,0 @@ -MIITEL_URL: https://datacom.miitel.jp/ -PROJECT_ID: rational-timing-443808-u0 -MODEL_ID: gemini-2.5-flash diff --git a/functions/generate-meeting-minutes/_scripts/deploy_dev.sh b/functions/generate-meeting-minutes/_scripts/deploy_dev.sh deleted file mode 100755 index d91fb99..0000000 --- a/functions/generate-meeting-minutes/_scripts/deploy_dev.sh +++ /dev/null @@ -1,35 +0,0 @@ -#!/bin/bash - -# プロジェクトIDを設定 -PROJECT_ID="datacom-poc" - -# デプロイする関数名 -FUNCTION_NAME="mrt-generate-meeting-minutes" - -# 関数のエントリポイント -ENTRY_POINT="handle_request" - -# ランタイム -RUNTIME="python312" - -# リージョン -REGION="asia-northeast1" - -# 環境変数ファイル -ENV_VARS_FILE=".env_dev" - -gcloud auth application-default set-quota-project $PROJECT_ID -gcloud config set project $PROJECT_ID - -# デプロイコマンド -gcloud functions deploy $FUNCTION_NAME \ - --gen2 \ - --region $REGION \ - --runtime $RUNTIME \ - --source=./source \ - --trigger-http \ - --cpu=0.5 \ - 
--memory=1Gi \ - --no-allow-unauthenticated \ - --entry-point $ENTRY_POINT \ - --env-vars-file $ENV_VARS_FILE \ No newline at end of file diff --git a/functions/generate-meeting-minutes/source/main.py b/functions/generate-meeting-minutes/source/main.py deleted file mode 100755 index 5f88379..0000000 --- a/functions/generate-meeting-minutes/source/main.py +++ /dev/null @@ -1,132 +0,0 @@ -import functions_framework -import vertexai -from vertexai.generative_models import GenerativeModel, ChatSession -from google.cloud import storage -from google.cloud import secretmanager -import json -import requests -import os -from datetime import datetime, timezone, timedelta -import gzip - - -# Storage クライアントを作成 -storage_client = storage.Client() -sm_client = secretmanager.SecretManagerServiceClient() - -@functions_framework.http -def handle_request(request): - # POSTリクエストの処理 - if request.method != 'POST': - return ({'error': 'Method not allowed'}, 405, {'Content-Type': 'application/json'}) - try: - request_json = request.get_json() - print(request_json) - - project_id = os.getenv("PROJECT_ID") - miitel_url = os.getenv("MIITEL_URL") - - video_info = request_json["video"] - - access_permission = video_info["access_permission"] - video_id = video_info["id"] # 会議履歴ID - host_name = video_info["host"]["user_name"] # ホストユーザー名 - host_id = video_info["host"]["login_id"] # ホストユーザーID - starts_at = video_info["starts_at"] # 開始日時 - ends_at = video_info["ends_at"] # 終了日時 - - video_url = miitel_url + "app/video/" + video_id # 会議履歴URL - title = video_info["title"] # 会議タイトル - print("会議タイトル",title) - - # 閲覧制限のない会議のみ生成 - if access_permission != "EVERYONE": - return (json.dumps({"status": "end"}, ensure_ascii=False), 200, {"Content-Type": "application/json"}) - - # 社外ミーティングのみ議事録作成 - if "様" not in title or "社内" in title: - return (json.dumps({"status": "end"}, ensure_ascii=False), 200, {"Content-Type": "application/json"}) - - # 議事録ファイル名 - jst_date_str = generate_jst_date(starts_at) # 開始日時をJSTに変換 - file_name = f"{jst_date_str} {title} {host_name}" - print(file_name) - # 議事録作成 - speech_recognition = video_info["speech_recognition"]["raw"] # 文字起こしデータ - minutes_text = create_minutes(project_id,speech_recognition) - print("議事録作成完了") - - # テキスト内容をセット - minutes = f"会議履歴URL:{video_url}\n" - minutes += f"担当者:{host_name}\n\n" - minutes += minutes_text - - response_data = { - "status": "next", # ステータス - "title": title, # 会議タイトル - "host_id": host_id, # ホストユーザーID - "host_name": host_name, # ホストユーザー名 - "video_url": video_url, # 会議履歴URL - "starts_at": starts_at, # 開始日時 - "ends_at": ends_at, # 終了日時 - "file_name": file_name, # 議事録ファイル名 - "minutes": minutes, # 議事録内容 - } - - return (json.dumps(response_data, ensure_ascii=False), 200, {"Content-Type": "application/json"}) - except Exception as e: - # エラー - error_response = { - "error": str(e) #エラー内容 - } - print(str(e)) - return json.dumps(error_response), 500, {'Content-Type': 'application/json'} #エラー - - -def generate_jst_date(starts_at): - - # UTCの文字列をdatetimeオブジェクトに変換 - utc_datetime = datetime.fromisoformat(starts_at) - - # JSTへの変換 - jst_timezone = timezone(timedelta(hours=9)) # JSTはUTC+9 - jst_datetime = utc_datetime.astimezone(jst_timezone) - - # yyyy-MM-dd形式にフォーマット - jst_date_str = jst_datetime.strftime("%Y年%m月%d日") - return jst_date_str - - -def create_minutes(project_id,speech_recognition): - location = "us-central1" - model_id = os.getenv("MODEL_ID") - # print("モデルID:", model_id) - - vertexai.init(project=project_id, location=location) - model = GenerativeModel(model_id) - # 
print("モデル初期化完了") - - prompt = f""" -あなたは議事録作成のプロフェッショナルです。以下の「文字起こし結果」は営業マンが録音した商談の文字起こしです。以下の制約条件に従い、最高の商談報告の議事録を作成してください。 - -制約条件: -1. 文字起こし結果にはAIによる書き起こしミスがある可能性を考慮してください。 -2. 冒頭に主要な「決定事項」と「アクションアイテム」をまとめてください。 -3. 議論のポイントを議題ごとに要約してください。 -4. 見出しや箇条書きを用いて、情報が探しやすい構造で簡潔かつ明瞭に記述してください。 -5. 要約は500文字以内に収めてください。 -6. 箇条書き形式で簡潔にまとめてください。 -7. マークダウン記法は使わず、各項目を「■」や「・」等を使って見やすくしてください。 - -文字起こし結果: -{speech_recognition} - """ - - - # print("-------------プロンプト-------------") - # print(prompt[:1000]) - # print("-------------議事録作成-------------") - response = model.generate_content(prompt) - # print(response.text) - return response.text - diff --git a/functions/generate-meeting-minutes/source/requirements.txt b/functions/generate-meeting-minutes/source/requirements.txt deleted file mode 100755 index fab01d8..0000000 --- a/functions/generate-meeting-minutes/source/requirements.txt +++ /dev/null @@ -1,5 +0,0 @@ -functions-framework==3.* -google-cloud-storage -google-cloud-aiplatform -google-cloud-secret-manager -pydrive2 \ No newline at end of file diff --git a/functions/generate_minutes/.DS_Store b/functions/generate_minutes/.DS_Store new file mode 100644 index 0000000000000000000000000000000000000000..bc676a440178efeb505237d310fc8313da0ff9e5 GIT binary patch literal 8196 zcmeHMTWl0n7(V~B&>2VIbZMb5rEIzo3vyXokjkZOyRAYmwsc#%ZGmNWXP}c#XX?)E z7O2(wL_*c*i`N(vjYfUI2dSb@CdLF4;{#Tryl7(LgYx3_#l-lZIkTlV9yJ8!By;}r z-_Gp+@Asdx=PV%vnsV9$gj5hhM0^;fGImQerf1KhCis$Kq=5b;J?4z2O*@VJMb4Ec}D=G z>q8jc%{UIU@1c-it2u|$-`!gQ4kYa;(p=G<&Sc&RLq{`6jqRltd{- zE5^rnN2A-qyXxyEwuQ$Zs%7K42PY;Zu{^SKUw3NQ9I@;RJOuEU0kUF2NKdNYQ4683= zo4FASHKiHV%;eBmZ#2z>agJMOvbJe<*^c2Fw$-0>^)oKhv5DrXk^_#Nb>rS-NdxES zRw4F|&Qa#<9M!W9Qc1eFa_z=VHQQ_V?2ok^?l>~Dik60CxqMAu-f)bxsrRc+Mz=J> zI^8>FXztJeX4GZ-Z_Sl^FL85O1&=;<;wBo^i3GEQcDwy|v>QPM|L zB(qBgmYRR{CtO^*)G8l2bhx#x{kAQ&?^d$nT3uGYW^H)$)}6cR4>Zn#kfXK$)Vc8Y zFh8$SJj{ETSb$dsJf%{u+E^&Vn+3=VvKNa*KO?fbr6s;~WRr-+LYMh$%@$qdVN$_k<;vPR3CoWb`Rw*R$O32NefuRWL|QzfAkrJ;Dwz^V@~3HT!D+70{|x0{ zkgMcdMEMOw`9DwuBCJJZ*C4X%p}|9U99rN}=!7olh8{Q$Cm;nX;@pH0uwWFji1Nn~ z=TE{@Z~>;^X~g=A@DjWXufl7H^mpK0cn>~+OE3)|`I!F_5q}+iT!7+^`6z}z=At;C z$Xcdtokm#!_4Cc|D6WV{Q^C&vEw{e^zoTLff(bGZWMF; res.send("こんにちは!")); +// Process Request From Miitel Webhook +router.post("/miitel", async (req, res) => { + try { + const body = req.body; + const parsedBody = MiiTelWebhookSchema.safeParse(body); + if (!parsedBody.success) throw createCustomError("ZOD_FAILED"); -router.post("/miitel", async(req, res) => { - const body = req.body; - // await storageController.saveToGCS("request_log",'test', JSON.stringify(req.body)); + const videoInfo = parsedBody.data.video; + const gzipped = zlib.gzipSync(JSON.stringify(body)); + await storageController.saveToGCS(CLOUD_STORAGE_LOG_FOLDER_NAME, `${videoInfo.id}.json.gz`, gzipped, 'application/gzip'); - const parsedBody = MiiTelWebhookSchema.safeParse(body); - if(!parsedBody.success) { - console.error("Invalid webhook body:", parsedBody.error); - return; + await processRequest(videoInfo); + + res.status(200).send("ok"); + } catch(err) { + res.status(400).send("Invalid webhook body"); } - console.log("miitel webhook received:", parsedBody.data.video.id); - - await processRequest(parsedBody.data.video); - - res.send("こんにちは!"); }); -router.post("/getLog", async(req, res) => { +// Update Master Data And Check Google Drive Folder +router.post("/dailyBatch", async (req, res) => { + try { + console.log("Starting daily batch process..."); + // export companies to GCS + const companies = await hubspotController.getCompanies(); + if(!companies) 
throw createCustomError("GET_OWNERS_FAILED"); + await storageController.saveToGCS(CLOUD_STORAGE_MASTER_FOLDER_NAME, COMPANIES_FILE_NAME, JSON.stringify(companies), 'application/json'); + + // export owners to GCS + const owners = await hubspotController.getOwners(); + if(!owners) throw createCustomError("GET_COMPANIES_FAILED"); + await storageController.saveToGCS(CLOUD_STORAGE_MASTER_FOLDER_NAME, OWNERS_FILE_NAME, JSON.stringify(owners), 'application/json'); + + // check folders in Google Drive + res.status(200).send("Daily batch executed."); + + } catch (error) { + console.error("Error in daily batch:", error); + } +}); + +// Check Log By Meeting ID +router.post("/getLog", async (req, res) => { console.log(req.body); const meetingId = req.body.meetingId; const exist = await storageController.existsInGCS("request_log", "test.json.gz"); @@ -32,4 +60,33 @@ router.post("/getLog", async(req, res) => { res.send(log); }); + + +// router.post("/deleteFile", async (req, res) => { +// console.log(req.body); +// const fileId = req.body.fileId; +// const googleAuth = await googleDriveController.getAuth(); +// const driveClilent = googleDriveController.getDriveClient(googleAuth); +// await googleDriveController.deleteFile(driveClilent, fileId); +// res.send('ok'); +// }); + +// router.post("/test", async (req, res) => { +// try { + +// const googleAuth = await googleDriveController.getAuth(); +// const driveClilent = googleDriveController.getDriveClient(googleAuth); +// const sheetsClient = googleDriveController.getSheetsClient(googleAuth); +// const folderId = await googleDriveController.searchFileIdByFileName(driveClilent, MINUTES_CREATION_HISTORY_FOLDER_ID, '2025'); +// if(!folderId) throw new Error() +// // console.log(fileId); +// // const sheetId = await googleDriveController.getLogSheetId(driveClilent, sheetsClient, folderId, 'test1'); +// // console.log('sheet id : ', sheetId); +// res.send("ok"); +// } catch (error) { +// console.error("Error in /test endpoint:", error); +// res.status(500).send("Error in /test endpoint"); +// } +// }); + export default router; \ No newline at end of file diff --git a/functions/generate_minutes/src/logics/date.ts b/functions/generate_minutes/src/logics/date.ts index a7461c1..e646116 100644 --- a/functions/generate_minutes/src/logics/date.ts +++ b/functions/generate_minutes/src/logics/date.ts @@ -21,5 +21,12 @@ export const dateController = { return formatted.replace(/(y+)/g, (v) => date.getFullYear().toString().slice(-v.length) ); - } + }, + getCurrentJstTime: (format: string) => { + const utcDate = new Date().toUTCString(); + const jstDate = dateController.convertToJst(utcDate); + const jstStr = dateController.getFormattedDate(jstDate, format); + return jstStr; + // return dateController.getFormattedDate(utcDate, "yyyy/MM/dd hh:mm:ss"); + }, }; \ No newline at end of file diff --git a/functions/generate_minutes/src/logics/error.ts b/functions/generate_minutes/src/logics/error.ts new file mode 100644 index 0000000..0a742b2 --- /dev/null +++ b/functions/generate_minutes/src/logics/error.ts @@ -0,0 +1,29 @@ +import { Response } from "express"; +import z from "zod"; +import { ERROR_DEFINITIONS, ErrorKey } from "../stores/errorCodes"; + +const CustomErrorSchema = z.object({ + code: z.string(), + message: z.string(), + statusCode:z.number(), +}); + +export type CustomError = z.infer; + +export const createCustomError = (key: ErrorKey): CustomError => { + const errorInfo = ERROR_DEFINITIONS[key]; + return CustomErrorSchema.parse(errorInfo); +}; + +export const 
responseError = (error: any, res: Response | null = null) => { + if (!CustomErrorSchema.safeParse(error).success) { + console.error(error); + console.error("========== Unknown Error =========="); + if(res) return res.status(500).send('Internal Server Error'); + } + const parsedError = CustomErrorSchema.parse(error); + console.error("========== Custom Error =========="); + console.error(`Error Code: ${parsedError.code}\n Message: ${parsedError.message}`); + if(res) return res.status(parsedError.statusCode).send(parsedError.message); +} + diff --git a/functions/generate_minutes/src/logics/file.ts b/functions/generate_minutes/src/logics/file.ts new file mode 100644 index 0000000..39411db --- /dev/null +++ b/functions/generate_minutes/src/logics/file.ts @@ -0,0 +1,53 @@ +import { create } from "domain"; +import { dateController } from "./date"; +import path, { join } from "path"; +import archiver from "archiver"; +import { googleDriveController } from "./googleDrive"; +import fs from "fs"; + + + +export const fileController = { + createMinutesFileName: (title: string, hostName: string, jstStartsAt: Date): string => { + const dateStr = dateController.getFormattedDate(jstStartsAt, "yyyy年MM月dd日"); + const fileName = `${dateStr} ${title} ${hostName}`; + return fileName; + }, + extractCompanyNameFromTitle: (title: string) => { + const normalizedTitle = title.replace("【", "").replace("】", ""); + const companyName = normalizedTitle.split("様")[0]; + return companyName + }, + createMinutesContent: (videoUrl: string, hostName: string, minutes: string): string => { + let minutesContent = `会議履歴URL:${videoUrl}\n`; + minutesContent += `担当者:${hostName}\n\n`; + minutesContent += minutes; + return minutesContent; + }, + createZip: async (body: any, outputPath: string, fileName: string) => { + console.log(outputPath); + await new Promise((resolve, reject) => { + const output = fs.createWriteStream(outputPath); + const archive = archiver('zip', { + zlib: { level: 9 } + }); + + output.on('close', () => { + console.log(archive.pointer() + ' total bytes'); + console.log('archiver has been finalized and the output file descriptor has closed.'); + resolve(true); + }); + + archive.on('error', (err) => { + reject(err); + }); + + archive.pipe(output); + archive.append(JSON.stringify(body), { name: fileName + '.json' }); + archive.finalize(); + }) + console.log("ZIP created"); + return; + }, + +}; \ No newline at end of file diff --git a/functions/generate_minutes/src/logics/fuzzyMatch.ts b/functions/generate_minutes/src/logics/fuzzyMatch.ts new file mode 100644 index 0000000..211b172 --- /dev/null +++ b/functions/generate_minutes/src/logics/fuzzyMatch.ts @@ -0,0 +1,62 @@ +import { search } from "fast-fuzzy"; +import { storageController } from "./storage"; +import { CLOUD_STORAGE_MASTER_FOLDER_NAME, COMPANIES_FILE_NAME, LEGAL_SUFFIX } from "../../serverConfig"; +import { Company, CompanySchema } from "./hubspot"; +import z from "zod"; + + +export const fuzzyMatchController = { + searchMatchedCompany: async(companyName: string): Promise => { + try { + const companiesJson = await storageController.loadJsonFromGCS(CLOUD_STORAGE_MASTER_FOLDER_NAME, COMPANIES_FILE_NAME); + if(!companiesJson) return null; + const parsedCompanies = z.array(CompanySchema).safeParse(JSON.parse(companiesJson)); + if(!parsedCompanies.success) return null; + + const normalizedCompanyName = fuzzyMatchController.normalizeCompanyName(companyName); + const normalizedCompanies: Company[] = parsedCompanies.data.map((c) => CompanySchema.parse({ + id: c.id, 
+ name: fuzzyMatchController.normalizeCompanyName(c.name), + })); + + // Exact Match + const exactMatchedCompany = fuzzyMatchController.searchExactMatchedCompany(normalizedCompanyName, normalizedCompanies); + // console.log(exactMatchedCompanyId); + if(exactMatchedCompany) return exactMatchedCompany; + + // Fuzzy Match + const results = search( + fuzzyMatchController.normalizeCompanyName(companyName), + parsedCompanies.data, + { + keySelector: (obj) => fuzzyMatchController.normalizeCompanyName(obj.name), + returnMatchData: true, + threshold: 0.8, + }, + ); + console.log("===== Search Results ====="); + console.log(results); + if(results.length <= 0) return null; + if(results.length === 1) return results[0].item; + if(results.length > 1) { + // 同スコアが複数存在 + if(results[0].score === results[1].score) return null; + // トップが単独の場合のみ + return results[0].item; + } + return null; + } catch(error) { + console.error(error); + return null; + } + }, + normalizeCompanyName: (companyName: string) => { + return companyName.replace(LEGAL_SUFFIX, ''); + }, + searchExactMatchedCompany: (companyName: string, companies: Company[]): Company | null => { + for(const company of companies) { + if(companyName === company.name) return company; + }; + return null; + }, +}; \ No newline at end of file diff --git a/functions/generate_minutes/src/logics/googleDrive.ts b/functions/generate_minutes/src/logics/googleDrive.ts index bf13ea6..50bd69c 100644 --- a/functions/generate_minutes/src/logics/googleDrive.ts +++ b/functions/generate_minutes/src/logics/googleDrive.ts @@ -1,36 +1,226 @@ -import { authenticate } from "@google-cloud/local-auth"; -import { JSONClient } from "google-auth-library/build/src/auth/googleauth"; -import { google } from "googleapis"; -import path from "path"; +import { docs_v1, drive_v3, google, sheets_v4 } from "googleapis"; +import fs from "fs"; +import { CREDENTIALS_PATH, DEBUG, FOLDER_MIMETYPE, LOG_SHEET_HEADER_VALUES, SHEET_MIMETYPE } from "../../serverConfig"; +import z from "zod"; const SCOPES = ["https://www.googleapis.com/auth/drive", "https://www.googleapis.com/auth/drive.file"] -const CREDENTIALS_PATH = path.join(process.cwd(), 'credentials.json'); +const MAX_RETRY = 3; + +export const LogRowDataSchema = z.object({ + timestamp: z.string(), + meetingDate: z.string(), + title: z.string(), + matchedCompanyName: z.string(), + ownerName: z.string(), + meetingUrl: z.string(), + documentUrl: z.string(), + hubspotUrl: z.string(), +}); + +export type LogRowData = z.infer export const googleDriveController = { - getAuth: async():Promise => { - const auth = await new google.auth.GoogleAuth({ - keyFile: CREDENTIALS_PATH, - scopes: SCOPES, - }); - return auth; + getAuth: async (): Promise => { + try { + const credentials = JSON.parse(process.env.SEARVICE_ACCOUNT_CREDENTIALS || "{}"); + console.log(credentials) + const auth = await new google.auth.GoogleAuth({ + credentials: credentials, + scopes: SCOPES, + }); + if (!auth) return null; + return auth; + } catch (error) { + console.error("Error obtaining Google Auth:", error); + return null; + } }, - checkConnection: async() => { - const auth = await googleDriveController.getAuth(); + getDriveClient: (auth: any): drive_v3.Drive => { // console.log("Google Drive client authenticated."); - const drive = google.drive({ version: "v3", auth: auth}); - const folder = '1cCDJKusfrlDrJe2yHCR8pCHJXRqX-4Hw'; - const res = await drive.files.list({ - q: `'${folder}' in parents`, + const drive = google.drive({ version: "v3", auth: auth }); + return drive; + }, + 
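// Note: the fuzzyMatchController added above first tries an exact match and then falls back to
// fast-fuzzy's search with a keySelector and returnMatchData. A standalone sketch of the same
// call shape (the company names below are made up, not real master data):
import { search } from "fast-fuzzy";

type SampleCompany = { id: string; name: string };

const sampleCompanies: SampleCompany[] = [
  { id: "1", name: "サンプル商事" },
  { id: "2", name: "テスト工業" },
];

const results = search("サンプル商事", sampleCompanies, {
  keySelector: (c: SampleCompany) => c.name,
  returnMatchData: true,
  threshold: 0.8,
});

// Mirrors the tie-breaking in fuzzyMatch.ts: accept the top hit only when it is unambiguous.
const matched =
  results.length === 1 ||
  (results.length > 1 && results[0].score !== results[1].score)
    ? results[0].item
    : null;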
getSheetsClient: (auth: any): sheets_v4.Sheets => { + const sheets = google.sheets({ version: "v4", auth: auth }); + return sheets; + }, + getDocsClient: (auth: any): docs_v1.Docs => { + const docs = google.docs({ version: "v1", auth: auth }); + return docs; + }, + + uploadFile: async (driveClient: drive_v3.Drive, filePath: string, folderId: string, fileName: string): Promise => { + try { + console.log("Uploading file to Google Drive:", filePath); + const response = await driveClient.files.create({ + requestBody: { + name: fileName, + parents: [folderId], + }, + media: { + mimeType: "application/zip", + body: fs.createReadStream(filePath), + }, + }); + console.log("File uploaded, Id:", response.data.id); + fs.unlinkSync(filePath); + return response.data.id; + } catch (error) { + console.error("Error uploading file:", error); + fs.unlinkSync(filePath); + return null; + } + }, + getFolderId: async (driveClient: drive_v3.Drive, folderId: string, fileName: string): Promise => { + try { + const existsFolderId = await googleDriveController.searchFileIdByFileName(driveClient, folderId, fileName); + if(existsFolderId) return existsFolderId; + console.log('=== Create New Folder ===') + const newFolderId = googleDriveController.createNewFile(driveClient, folderId, fileName, FOLDER_MIMETYPE); + if(!newFolderId) return null; + return newFolderId; + } catch (error) { + console.error('Error searching files:', error); + return null; + } + }, + searchFileIdByFileName: async (driveClient: drive_v3.Drive, folderId: string, fileName: string): Promise => { + try { + const params = googleDriveController.getSearchFileParamsByDebugMode(folderId); + const res = await driveClient.files.list(params); + console.log("Files:"); + console.log(res.data.files); + if(!res.data.files) return null; + + for(const file of res.data.files) { + if(fileName === file.name) { + if(!file.id) return null; + return file.id; + } + } + return null; + } catch (error) { + console.error('Error searching files:', error); + return null; + } + }, + getSearchFileParamsByDebugMode: (folderId: string): drive_v3.Params$Resource$Files$List => { + if(DEBUG) { + return { + corpora: 'user', + q: `'${folderId}' in parents`, + pageSize: 10, + fields: "files(id, name)", + includeItemsFromAllDrives: true, + includeTeamDriveItems: true, + supportsAllDrives: true + } + } + return { + corpora: 'drive', + driveId: process.env.GOOGLE_DRIVE_FOLDER_ID, + q: `'${folderId}' in parents`, pageSize: 10, fields: "files(id, name)", - }); - console.log("Files:"); - console.log(res.data.files); + includeItemsFromAllDrives: true, + includeTeamDriveItems: true, + supportsAllDrives: true + } }, - uploadFile: async() => { + createNewFile: async (driveClient: drive_v3.Drive, folderId: string, fileName: string, mimeType: string): Promise => { + try { + const requestBody = { + name: fileName, + parents: [folderId], // 作成したフォルダのIDを指定 + mimeType: mimeType, + }; + const file = await driveClient.files.create({ + requestBody, + // fields: 'id', + }); + + console.log('File Id:', file.data); + if (!file.data.id) return null; + return file.data.id; + } catch (error) { + console.error('Error creating file:', error); + return null; + } + }, + // CAUTION + deleteFile: async (driveClient: drive_v3.Drive, fileId: string) => { + try { + const body = { trashed: true } + const response = await driveClient.files.update({ + fileId: fileId, + requestBody: body, + }); + console.log('File deleted:', response.data); + } catch (error) { + console.error('Error deleting file:', error); + } + }, + 
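// Note: a condensed usage sketch of how these Drive/Docs helpers are chained for one document,
// mirroring the flow in process.ts (the folder ID, file name and body text below are placeholders):
import { googleDriveController } from "./googleDrive";
import { DOCUMENT_MIMETYPE } from "../../serverConfig";

const writeSampleMinutes = async (): Promise<void> => {
  const auth = await googleDriveController.getAuth();
  if (!auth) return; // getAuth returns null when credentials are missing
  const drive = googleDriveController.getDriveClient(auth);
  const docs = googleDriveController.getDocsClient(auth);
  // Placeholder folder ID; the deployed code uses GOOGLE_DRIVE_FOLDER_ID instead.
  const docId = await googleDriveController.createNewFile(drive, "FOLDER_ID", "sample-minutes", DOCUMENT_MIMETYPE);
  if (!docId) return;
  await googleDriveController.addContentToDocs(docs, docId, "会議履歴URL:...\n担当者:...\n\n(議事録本文)");
};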
addContentToDocs: async (docsClient: docs_v1.Docs, documentId: string, content: string): Promise => { + try { + const requestBody: docs_v1.Schema$BatchUpdateDocumentRequest = { + requests: [ + { + insertText: { + text: content, + location: { + index: 1, + } + } + } + ] + }; + const response = await docsClient.documents.batchUpdate({ + documentId: documentId, + requestBody: requestBody, + }); + console.log('Content added to document:', response.data); + return true; + } catch (error) { + console.error('Error adding content to document:', error); + return false; + } }, - createNewFile: async() => { + getLogSheetId: async (driveClient: drive_v3.Drive, sheetsClient: sheets_v4.Sheets, folderId: string, fileName: string): Promise => { + try { + const existsSheetId = await googleDriveController.searchFileIdByFileName(driveClient, folderId, fileName); + if(existsSheetId) return existsSheetId; + console.log('=== Create New Sheet ===') + const newSheetId = await googleDriveController.createNewFile(driveClient, folderId, fileName, SHEET_MIMETYPE); + if(!newSheetId) return null; + // + await googleDriveController.insertRowToSheet(sheetsClient, newSheetId, ['※シート名変更厳禁']); + await googleDriveController.insertRowToSheet(sheetsClient, newSheetId, LOG_SHEET_HEADER_VALUES); + return newSheetId; + } catch (error) { + console.error('Error searching files:', error); + return null; + } + }, + + insertRowToSheet: async (sheetsClient: sheets_v4.Sheets, sheetId: string, rowData: string[] ): Promise => { + try { + const body = { + values: [rowData] + } + const params: sheets_v4.Params$Resource$Spreadsheets$Values$Append = { + spreadsheetId: sheetId, + range: 'Sheet1', + valueInputOption: 'USER_ENTERED', + insertDataOption: 'INSERT_ROWS', + requestBody: body, + } + await sheetsClient.spreadsheets.values.append(params); + return true; + } catch (error) { + console.log(error); + return false; + } }, }; \ No newline at end of file diff --git a/functions/generate_minutes/src/logics/hubspot.ts b/functions/generate_minutes/src/logics/hubspot.ts index e69de29..e48aa1c 100644 --- a/functions/generate_minutes/src/logics/hubspot.ts +++ b/functions/generate_minutes/src/logics/hubspot.ts @@ -0,0 +1,116 @@ +import { Client } from "@hubspot/api-client"; +import { AssociationSpecAssociationCategoryEnum } from "@hubspot/api-client/lib/codegen/crm/objects/meetings/models/AssociationSpec"; +import { PublicAssociationsForObject } from "@hubspot/api-client/lib/codegen/crm/objects/meetings"; +import z, { email } from "zod"; + +const hubspotClient = new Client({ accessToken: process.env.HUBSPOT_ACCESS_TOKEN }); + +export const CompanySchema = z.object({ + id: z.string(), + name: z.string(), +}) + +export const OwnerSchema = z.object({ + id: z.string(), + email: z.string().optional().default(''), +}); + +export type Company = z.infer; +export type Owner = z.infer; + +export const hubspotController = { + check: async() => { + const response = await hubspotClient.crm.companies.getAll(); + console.log(response.length); + }, + getCompanies: async(): Promise => { + try { + const allCompanies: Company[] = []; + const limit = 100; + let after: string | undefined = undefined; + for(let i = 0; i < 1000; i++) { + console.log(`Fetching companies, iteration ${i+1}`); + const response = await hubspotClient.crm.companies.basicApi.getPage(limit, after); + // console.log(response.results); + const companies: Company[] = response.results.map((company) => CompanySchema.parse({ + id: company.id, + name: company.properties.name, + })); + 
allCompanies.push(...companies); + + if(response.paging && response.paging.next && response.paging.next.after) { + after = response.paging.next.after; + continue; + } + break; + } + return allCompanies; + } catch (error) { + return null; + } + }, + getOwners: async(): Promise => { + try { + const allOwners: Owner[] = []; + const limit = 100; + let after: string | undefined = undefined; + for(let i = 0; i < 1000; i++) { + console.log(`Fetching owners, iteration ${i+1}`); + const response = await hubspotClient.crm.owners.ownersApi.getPage(undefined,after,limit); + // console.log(response.results); + + const owners: Owner[] = response.results.map((owner) => OwnerSchema.parse({ + id: owner.id, + email: owner.email, + })); + allOwners.push(...owners); + + if(response.paging && response.paging.next && response.paging.next.after) { + after = response.paging.next.after; + continue; + } + break; + } + return allOwners; + } catch (error) { + console.error("Error fetching owners:", error); + return null; + } + }, + createMeetingLog: async(companyId: string, title: string, userId: string | null, minutes: string, startsAt: string, endsAt: string ): Promise => { + try { + // 改行コードを変換 + const minutes_html = minutes.replace("\n", "
") + const associations: PublicAssociationsForObject[] = [{ + types: [ + {associationCategory: AssociationSpecAssociationCategoryEnum.HubspotDefined, associationTypeId: 188}, + ], + to: {id: companyId}, + }]; + + const properties = { + hs_timestamp: startsAt, + hs_meeting_title: title, + hubspot_owner_id: userId || '', + hs_meeting_body: minutes_html, + hs_meeting_start_time: startsAt, + hs_meeting_end_time: endsAt, + } + + const result = await hubspotClient.crm.objects.meetings.basicApi.create({ + associations: associations, + properties: properties, + }); + return true; + } catch (error) { + console.error("Error creating HubSpot meeting log:", error); + return false; + } + }, + searchOwnerIdByEmail: (email: string, owners: Owner[]): string | null => { + for(const owner of owners) { + if(email === owner.email) return owner.id; + } + return null; + }, +}; \ No newline at end of file diff --git a/functions/generate_minutes/src/logics/process.ts b/functions/generate_minutes/src/logics/process.ts index b52d62a..1b5764e 100644 --- a/functions/generate_minutes/src/logics/process.ts +++ b/functions/generate_minutes/src/logics/process.ts @@ -1,7 +1,15 @@ import z from "zod"; import { aiController } from "./ai"; import { dateController } from "./date"; -import { googleDriveController } from "./googleDrive"; +import { googleDriveController, LogRowData, LogRowDataSchema } from "./googleDrive"; +import { fileController } from "./file"; +import path, { join } from "path"; +import fs from "fs"; +import { createCustomError, responseError } from "./error"; +import { storageController } from "./storage"; +import { CLOUD_STORAGE_MASTER_FOLDER_NAME, DATE_FORMAT, DATETIME_FORMAT, DOCUMENT_MIMETYPE, OWNERS_FILE_NAME, Y_FORMAT, YM_FORMAT } from "../../serverConfig"; +import { hubspotController, OwnerSchema } from "./hubspot"; +import { fuzzyMatchController } from "./fuzzyMatch"; const VideoInfoSchema = z.looseObject({ id: z.string(), @@ -26,32 +34,101 @@ export const MiiTelWebhookSchema = z.object({ // export type MiiTelWebhook = z.infer; -export const processRequest = async(videoInfo: VideoInfo) => { - const videoId = videoInfo.id; - const title = videoInfo.title; - const startsAt = videoInfo.starts_at; - const endsAt = videoInfo.ends_at; - const accessPermission = videoInfo.access_permission; - - const host_id = videoInfo.host.login_id; - const host_name = videoInfo.host.user_name; - - const speechRecognition = videoInfo.speech_recognition.raw; - - console.log(startsAt); - const jstStartsAt = dateController.convertToJst(startsAt); - const jstEndsAt = dateController.convertToJst(endsAt); - - googleDriveController.checkConnection(); - // console.log(dateController.getFormattedDate(startsAtJst, "yyyy/MM/dd hh:mm:ss")); - // console.log(endsAt); - // console.log("Processing video:", host_id, host_name, title); - if(accessPermission !== "EVERYONE" || !title.includes("様") || title.includes("社内")) return; - +const GOOGLE_DRIVE_FOLDER_ID = process.env.GOOGLE_DRIVE_FOLDER_ID || ''; +const MIITEL_REQUEST_LOG_FOLDER_ID = process.env.MIITEL_REQUEST_LOG_FOLDER_ID || ''; +const MINUTES_CREATION_HISTORY_FOLDER_ID = process.env.MINUTES_CREATION_HISTORY_FOLDER_ID || ''; +const MIITEL_URL = process.env.MIITEL_URL || ''; +const HUBSPOT_COMPANY_URL = process.env.HUBSPOT_COMPANY_URL || ''; - // Save Request Log to Google Drive - // const minute = await aiController.generateMinutes(speechRecognition); - // console.log(minute); +const FILE_PATH = join(__dirname, "../files/"); - }; \ No newline at end of file +export const 
processRequest = async (videoInfo: VideoInfo) => { + try { + const videoId = videoInfo.id; + const title = videoInfo.title; + const startsAt = videoInfo.starts_at; + const endsAt = videoInfo.ends_at; + const accessPermission = videoInfo.access_permission; + + const hostId = videoInfo.host.login_id; + const hostName = videoInfo.host.user_name; + + const speechRecognition = videoInfo.speech_recognition.raw; + + const jstStartsAt = dateController.convertToJst(startsAt); + const jstEndsAt = dateController.convertToJst(endsAt); + const fileName = fileController.createMinutesFileName(title, hostName, jstStartsAt); + const videoUrl = `${MIITEL_URL}app/video/${videoId}`; + + if (accessPermission !== "EVERYONE" || !title.includes("様") || title.includes("社内")) return; + + // + const googleAuth = await googleDriveController.getAuth(); + const driveClient = googleDriveController.getDriveClient(googleAuth); + const docsClient = googleDriveController.getDocsClient(googleAuth); + const sheetsClient = googleDriveController.getSheetsClient(googleAuth); + + // ===== Save Request Log to Google Drive ===== + if (!fs.existsSync(FILE_PATH)) fs.mkdirSync(FILE_PATH, { recursive: true }); + const outputPath = path.join(FILE_PATH, fileName + '.zip'); + await fileController.createZip(videoInfo, outputPath, fileName); + + const logFileId = await googleDriveController.uploadFile(driveClient, outputPath, MIITEL_REQUEST_LOG_FOLDER_ID, fileName + '.zip'); + if(!logFileId) throw createCustomError("UPLOAD_LOG_FAILED"); + + // ===== Generate Minutes ===== + const minutes = await aiController.generateMinutes(speechRecognition); + console.log(minutes); + if (!minutes) throw createCustomError("AI_GENERATION_FAILED"); + let content = `会議履歴URL:${videoUrl}\n`; + content += `担当者:${hostName}\n\n`; + content += minutes; + + + // ===== Upload To Google Drive ===== + const documentId = await googleDriveController.createNewFile(driveClient, GOOGLE_DRIVE_FOLDER_ID, title, DOCUMENT_MIMETYPE); + if (!documentId) throw createCustomError("UPLOAD_MINUTES_FAILED"); + const result = await googleDriveController.addContentToDocs(docsClient, documentId, minutes); + if(!result) throw createCustomError("UPLOAD_MINUTES_FAILED"); + + // ===== Create Meeting Log at Hubspot ===== + const ownersJson = await storageController.loadJsonFromGCS(CLOUD_STORAGE_MASTER_FOLDER_NAME, OWNERS_FILE_NAME); + if(!ownersJson) throw createCustomError("GET_OWNERS_FAILED"); + const parsedOwners = z.array(OwnerSchema).safeParse(JSON.parse(ownersJson)); + if(!parsedOwners.success) throw createCustomError("ZOD_FAILED"); + const ownerId = hubspotController.searchOwnerIdByEmail(hostId, parsedOwners.data); + + + const companyName = fileController.extractCompanyNameFromTitle(title); + const matchedCompany = await fuzzyMatchController.searchMatchedCompany(companyName); + if(matchedCompany) await hubspotController.createMeetingLog(matchedCompany.id, title, ownerId, minutes, startsAt, endsAt); + + // ===== Apeend Log To SpreadSheet ===== + const currentYear = dateController.getCurrentJstTime(Y_FORMAT); + const yearFileId = await googleDriveController.getFolderId(driveClient, MINUTES_CREATION_HISTORY_FOLDER_ID, currentYear); + if(!yearFileId) throw createCustomError("GET_FOLDER_ID_FAILED"); + + const currentYearMonth = dateController.getCurrentJstTime(YM_FORMAT); + const sheetId = await googleDriveController.getLogSheetId(driveClient, sheetsClient, yearFileId, currentYearMonth); + if(!sheetId) throw createCustomError("GET_SHEET_ID_FAILED"); + + const currentJstDateTimeStr = 
dateController.getCurrentJstTime(DATETIME_FORMAT); + const currentJstDateStr = dateController.getCurrentJstTime(DATE_FORMAT); + const rowData: LogRowData = LogRowDataSchema.parse({ + timestamp: currentJstDateTimeStr, + meetingDate: currentJstDateStr, + title: title, + matchedCompanyName: matchedCompany?.name ?? '', + ownerName: hostName, + meetingUrl: videoUrl, + documentUrl: `https://docs.google.com/document/d/${documentId}/edit`, + hubspotUrl: matchedCompany ? `${HUBSPOT_COMPANY_URL}/${matchedCompany.id}` : '', + }); + await googleDriveController.insertRowToSheet(sheetsClient, sheetId, Object.values(rowData)); + return; + } catch (error) { + responseError(error); + return; + } +}; \ No newline at end of file diff --git a/functions/generate_minutes/src/logics/storage.ts b/functions/generate_minutes/src/logics/storage.ts index 31b14df..8275145 100644 --- a/functions/generate_minutes/src/logics/storage.ts +++ b/functions/generate_minutes/src/logics/storage.ts @@ -1,19 +1,15 @@ import { Storage } from "@google-cloud/storage"; import zlib from "zlib"; -const csClient = new Storage({ - projectId: 'datacom-poc', -} -); -const BUCKET_NAME = "meeting-report-data"; +const csClient = new Storage({projectId: process.env.PROJECT_ID}); +const BUCKET_NAME = process.env.CLOUD_STORAGE_BUCKET_NAME || ''; const bucket = csClient.bucket(BUCKET_NAME); export const storageController = { - saveToGCS: async(folder: string, filename: string, text: string) => { - const gzipped = zlib.gzipSync(text); - const file = bucket.file((`${folder}/${filename}.json.gz`)); - await file.save(gzipped, { - contentType: 'application/gzip', + saveToGCS: async(folder: string, filename: string, content: any, contentType: string) => { + const file = bucket.file((`${folder}/${filename}`)); + await file.save(content, { + contentType: contentType, }) }, loadFromGCS: async(folder: string, filename: string): Promise => { @@ -26,6 +22,16 @@ export const storageController = { return null; } }, + loadJsonFromGCS: async(folder: string, filename: string): Promise => { + const file = bucket.file(`${folder}/${filename}`); + // console.log("loading file:", file.name); + try { + const [data] = await file.download(); + return data.toString("utf-8"); + } catch (err: any) { + return null; + } + }, existsInGCS: async(folder: string, filename: string): Promise => { const file = bucket.file((`${folder}/${filename}`)); console.log("checking file:", file.name); diff --git a/functions/generate_minutes/src/stores/errorCodes.ts b/functions/generate_minutes/src/stores/errorCodes.ts new file mode 100644 index 0000000..3533169 --- /dev/null +++ b/functions/generate_minutes/src/stores/errorCodes.ts @@ -0,0 +1,26 @@ +// errorDefinitions.ts + +export const ERROR_DEFINITIONS = { + ZOD_FAILED: { code: "E1003", message: "zodのチェックが失敗しました", statusCode: -1 }, + // ログ ZIP の Google Drive アップロード失敗 + UPLOAD_LOG_FAILED: { code: "E3001", message: "ログファイルのアップロードに失敗しました", statusCode: 500 }, + + // AI による議事録生成失敗 + AI_GENERATION_FAILED: { code: "E2001", message: "AIによる議事録生成に失敗しました", statusCode: 500 }, + + // 議事録(Google Docs)の作成/アップロード失敗 + UPLOAD_MINUTES_FAILED: { code: "E3002", message: "議事録のアップロードに失敗しました", statusCode: 500 }, + + // オーナー情報の取得失敗 + GET_OWNERS_FAILED: { code: "E3003", message: "オーナー情報の取得に失敗しました", statusCode: 500 }, + GET_COMPANIES_FAILED: { code: "E3004", message: "会社情報の取得に失敗しました", statusCode: 500 }, + + // 議事録作成履歴スプレッドシートの取得失敗 + GET_MINUTES_HISTORY_FAILED: { code: "E3005", message: "議事録作成履歴の取得に失敗しました", statusCode: 500 }, + + + GET_FOLDER_ID_FAILED: { code: 
"E3006", message: "フォルダID取得に失敗しました", statusCode: 500 }, + GET_SHEET_ID_FAILED: { code: "E3007", message: "スプレッドシートID取得に失敗しました", statusCode: 500 }, +} as const; + +export type ErrorKey = keyof typeof ERROR_DEFINITIONS; diff --git a/functions/trigger-minutes-workflow-from-miitel/.env_debug b/functions/trigger-minutes-workflow-from-miitel/.env_debug deleted file mode 100755 index 195ea91..0000000 --- a/functions/trigger-minutes-workflow-from-miitel/.env_debug +++ /dev/null @@ -1,4 +0,0 @@ -PROJECT_ID=datacom-poc -LOCATION=asia-northeast1 -BUCKET=meeting-report-data -WORKFLOW=mrt-workflow-create-minutes diff --git a/functions/trigger-minutes-workflow-from-miitel/.env_dev b/functions/trigger-minutes-workflow-from-miitel/.env_dev deleted file mode 100755 index e0baea9..0000000 --- a/functions/trigger-minutes-workflow-from-miitel/.env_dev +++ /dev/null @@ -1,4 +0,0 @@ -PROJECT_ID: datacom-poc -LOCATION: asia-northeast1 -BUCKET: meeting-report-data -WORKFLOW: mrt-workflow-create-minutes diff --git a/functions/trigger-minutes-workflow-from-miitel/.env_prod b/functions/trigger-minutes-workflow-from-miitel/.env_prod deleted file mode 100755 index 1141bab..0000000 --- a/functions/trigger-minutes-workflow-from-miitel/.env_prod +++ /dev/null @@ -1,4 +0,0 @@ -PROJECT_ID: rational-timing-443808-u0 -LOCATION: asia-northeast1 -BUCKET: meeting-data -WORKFLOW: mrt-workflow-create-minutes diff --git a/functions/trigger-minutes-workflow-from-miitel/_scripts/deploy_dev.sh b/functions/trigger-minutes-workflow-from-miitel/_scripts/deploy_dev.sh deleted file mode 100755 index 022d3a0..0000000 --- a/functions/trigger-minutes-workflow-from-miitel/_scripts/deploy_dev.sh +++ /dev/null @@ -1,33 +0,0 @@ -#!/bin/bash - -# プロジェクトIDを設定 -PROJECT_ID="datacom-poc" - -# デプロイする関数名 -FUNCTION_NAME="mrt-trigger-minutes-workflow-from-miitel" - -# 関数のエントリポイント -ENTRY_POINT="handle_request" - -# ランタイム -RUNTIME="python312" - -# リージョン -REGION="asia-northeast1" - -# 環境変数ファイル -ENV_VARS_FILE=".env_dev" - -gcloud auth application-default set-quota-project $PROJECT_ID -gcloud config set project $PROJECT_ID - -# デプロイコマンド -gcloud functions deploy $FUNCTION_NAME \ - --gen2 \ - --region $REGION \ - --runtime $RUNTIME \ - --source=./source \ - --trigger-http \ - --no-allow-unauthenticated \ - --entry-point $ENTRY_POINT \ - --env-vars-file $ENV_VARS_FILE \ No newline at end of file diff --git a/functions/trigger-minutes-workflow-from-miitel/source/main.py b/functions/trigger-minutes-workflow-from-miitel/source/main.py deleted file mode 100755 index 08bf7d7..0000000 --- a/functions/trigger-minutes-workflow-from-miitel/source/main.py +++ /dev/null @@ -1,75 +0,0 @@ -import functions_framework -from google.cloud import storage -from google.cloud.workflows import executions_v1 -from google.cloud.workflows.executions_v1.types import Execution -import json -import os -import gzip - - -# Storage クライアントを作成 -cs_client = storage.Client() -wf_client = executions_v1.ExecutionsClient() - -@functions_framework.http -def handle_request(request): - # POSTリクエストの処理 - if request.method != 'POST': - # 他のメソッドに対するエラーレスポンス - return ({'error': 'Method not allowed'}, 405) - - try: - request_json = request.get_json() - print(request_json) - - - if "challenge" in request_json: - # MiiTelのチャレンジリクエストに対する応答 - return (request_json["challenge"], 200, {'Content-Type':'text/plain'}) - - project_id = os.getenv("PROJECT_ID") - bucket_name = os.getenv("BUCKET") # 共有ドライブID - location = os.getenv("LOCATION") # ワークフローのロケーション - workflow = os.getenv("WORKFLOW") # ワークフロー名 - - # デバッグ用に保存 - 
save_to_gcs(bucket_name,request_json) - - # ワークフロー呼び出し - argument = json.dumps({"video": request_json["video"]}) - execution = Execution(argument=argument) - parent = f"projects/{project_id}/locations/{location}/workflows/{workflow}" - print(parent) - response = wf_client.create_execution(request={"parent": parent, "execution": execution}) - print(f"Workflow execution started: {response.name}") - - return (json.dumps({}), 200, {'Content-Type': 'application/json'}) - except Exception as e: - # エラー - error_response = { - "error": str(e) #エラー内容 - } - print(str(e)) - return json.dumps(error_response), 500, {'Content-Type': 'application/json'} #エラー - - - - -def save_to_gcs(bucket_name,request_json): - file_name = request_json["video"]["id"] + ".json.gz" - - bucket = cs_client.bucket(bucket_name) - - # GCS バケットのブロブを取得 - blob = bucket.blob(f"request_log/{file_name}") - - - # JSONを文字列に変換 - json_string = json.dumps(request_json) - - # Gzip圧縮 - compressed_data = gzip.compress(json_string.encode('utf-8')) - - # 圧縮されたデータをアップロード - blob.upload_from_string(compressed_data, content_type='application/gzip') - diff --git a/functions/trigger-minutes-workflow-from-miitel/source/requirements.txt b/functions/trigger-minutes-workflow-from-miitel/source/requirements.txt deleted file mode 100755 index 1f7ed5c..0000000 --- a/functions/trigger-minutes-workflow-from-miitel/source/requirements.txt +++ /dev/null @@ -1,4 +0,0 @@ -functions-framework==3.* -Flask -google-cloud-storage -google-cloud-workflows \ No newline at end of file diff --git a/functions/upload-minutes-to-drive/.env_debug b/functions/upload-minutes-to-drive/.env_debug deleted file mode 100755 index 4331911..0000000 --- a/functions/upload-minutes-to-drive/.env_debug +++ /dev/null @@ -1,2 +0,0 @@ -KEY_PATH=projects/570987459910/secrets/sa-create-minutes-key -FOLDER_ID=0AGT_1dSq66qYUk9PVA diff --git a/functions/upload-minutes-to-drive/.env_dev b/functions/upload-minutes-to-drive/.env_dev deleted file mode 100755 index 0fd6eab..0000000 --- a/functions/upload-minutes-to-drive/.env_dev +++ /dev/null @@ -1,2 +0,0 @@ -KEY_PATH: projects/32472615575/secrets/sa-access-google-drive-key -FOLDER_ID: 1cCDJKusfrlDrJe2yHCR8pCHJXRqX-4Hw diff --git a/functions/upload-minutes-to-drive/.env_prod b/functions/upload-minutes-to-drive/.env_prod deleted file mode 100755 index 2b50f81..0000000 --- a/functions/upload-minutes-to-drive/.env_prod +++ /dev/null @@ -1,2 +0,0 @@ -KEY_PATH: projects/570987459910/secrets/sa-create-minutes-key -FOLDER_ID: 0AGT_1dSq66qYUk9PVA diff --git a/functions/upload-minutes-to-drive/_scripts/deploy_dev.sh b/functions/upload-minutes-to-drive/_scripts/deploy_dev.sh deleted file mode 100755 index f4dbcee..0000000 --- a/functions/upload-minutes-to-drive/_scripts/deploy_dev.sh +++ /dev/null @@ -1,33 +0,0 @@ -#!/bin/bash - -# プロジェクトIDを設定 -PROJECT_ID="datacom-poc" - -# デプロイする関数名 -FUNCTION_NAME="mrt-create-minutes" - -# 関数のエントリポイント -ENTRY_POINT="handle_request" - -# ランタイム -RUNTIME="python312" - -# リージョン -REGION="asia-northeast1" - -# 環境変数ファイル -ENV_VARS_FILE=".env_dev" - -gcloud auth application-default set-quota-project $PROJECT_ID -gcloud config set project $PROJECT_ID - -# デプロイコマンド -gcloud functions deploy $FUNCTION_NAME \ - --gen2 \ - --region $REGION \ - --runtime $RUNTIME \ - --source=./source \ - --trigger-http \ - --no-allow-unauthenticated \ - --entry-point $ENTRY_POINT \ - --env-vars-file $ENV_VARS_FILE \ No newline at end of file diff --git a/functions/upload-minutes-to-drive/source/main.py b/functions/upload-minutes-to-drive/source/main.py 
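For reference, the gzip-and-upload behaviour of the deleted save_to_gcs helper above is what the new TypeScript stack now performs in the /miitel route: the raw webhook body is compressed and stored in Cloud Storage as <meeting id>.json.gz. A condensed sketch of that path using the @google-cloud/storage client (bucket and folder names below are placeholders, not the deployed values):

import { Storage } from "@google-cloud/storage";
import zlib from "zlib";

// Placeholder bucket; the deployed code reads CLOUD_STORAGE_BUCKET_NAME from the environment.
const bucket = new Storage({ projectId: process.env.PROJECT_ID }).bucket("my-meeting-bucket");

const saveRequestLog = async (meetingId: string, body: unknown): Promise<void> => {
  const gzipped = zlib.gzipSync(JSON.stringify(body));
  await bucket
    .file(`request_logs/${meetingId}.json.gz`)
    .save(gzipped, { contentType: "application/gzip" });
};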
deleted file mode 100755 index 46b6b64..0000000 --- a/functions/upload-minutes-to-drive/source/main.py +++ /dev/null @@ -1,128 +0,0 @@ -import functions_framework -from google.cloud import secretmanager -from google.oauth2 import service_account -from googleapiclient.discovery import build -from googleapiclient.errors import HttpError -import json -import os - -SCOPES = ["https://www.googleapis.com/auth/drive", "https://www.googleapis.com/auth/drive.file"] - -sm_client = secretmanager.SecretManagerServiceClient() - -@functions_framework.http -def handle_request(request): - # POSTリクエストの処理 - if request.method != 'POST': - # 他のメソッドに対するエラーレスポンス - return ({'error': 'Method not allowed'}, 405) - - try: - request_json = request.get_json() - print(request_json) - - folder_id = os.getenv("FOLDER_ID") # 共有ドライブID - - file_name = request_json["file_name"] # 会議タイトル - minutes = request_json["minutes"] # 議事録 - - - # Secret Manager からサービスアカウントJSON文字列を取得 - service_account_info = get_service_account_info() - # 認証 - credentials = get_credentials(service_account_info) - - # APIクライアントの構築 - drive_service = build("drive", "v3", credentials=credentials) - docs_service = build("docs", "v1", credentials=credentials) - - # ファイル作成 - document_id = create_new_document(drive_service, folder_id, file_name) - print(f"Created document with ID: {document_id}") - - # テキスト内容をセット - append_minutes_to_doc(docs_service, document_id, minutes) - - response_data = { - "document_id": document_id, # 作成したドキュメントのID - } - - return json.dumps(response_data) , 200, {"Content-Type": "application/json"} - except Exception as e: - # エラー - error_response = { - "error": str(e) #エラー内容 - } - print(str(e)) - return json.dumps(error_response), 500, {'Content-Type': 'application/json'} #エラー - - - -# -# SecretManagerから秘密鍵を取得 -# -def get_service_account_info(): - key_path = os.getenv('KEY_PATH') + "/versions/1" - # 秘密鍵取得 - response = sm_client.access_secret_version(name=key_path) - # 秘密鍵の値をデコード - secret_key = response.payload.data.decode("UTF-8") - return json.loads(secret_key) - -# Google Drive認証 -def get_credentials(service_account_info): - credentials = service_account.Credentials.from_service_account_info( - service_account_info, - scopes=SCOPES - ) - return credentials - - -def create_new_document(service,folder_id,title): - """ - Google Sheets APIを使用して新しいスプレッドシートを作成する - :param service: Google Sheets APIのサービスオブジェクト - :param title: スプレッドシートのタイトル - :return: 作成したスプレッドシートのID - """ - file_metadata = { - 'name': title, - 'parents': [folder_id], # 作成したフォルダのIDを指定 - 'mimeType': 'application/vnd.google-apps.document', - } - result = ( - service.files() - .create(body=file_metadata, fields="id", supportsAllDrives=True) - .execute() - ) - return result.get("id") - - -def append_minutes_to_doc(service, document_id, minutes): - """ - Google Sheets APIを使用してスプレッドシートにログを追加する - :param service: Google Sheets APIのサービスオブジェクト - :param spreadsheet_id: スプレッドシートのID - :param row_data: 追加するログデータ(リスト形式) - """ - requests = [ - { - 'insertText': { - 'location': { - 'index': 1, - }, - 'text': minutes - } - }, - ] - - body = { - 'requests': requests - } - - # スプレッドシートにログを追加 - result = service.documents().batchUpdate( - documentId=document_id, - body=body, - ).execute() - return result diff --git a/functions/upload-minutes-to-drive/source/requirements.txt b/functions/upload-minutes-to-drive/source/requirements.txt deleted file mode 100755 index e809a11..0000000 --- a/functions/upload-minutes-to-drive/source/requirements.txt +++ /dev/null @@ -1,5 +0,0 @@ -functions-framework==3.* 
-google-cloud-secret-manager -google-api-python-client -google-auth-httplib2 -google-auth-oauthlib \ No newline at end of file diff --git a/workflows/workflow-create-minutes/_scripts/deploy_dev.sh b/workflows/workflow-create-minutes/_scripts/deploy_dev.sh deleted file mode 100755 index 15b0428..0000000 --- a/workflows/workflow-create-minutes/_scripts/deploy_dev.sh +++ /dev/null @@ -1,14 +0,0 @@ -#!/bin/bash - -# 環境変数 -PROJECT_ID="datacom-poc" -WORKFLOW_NAME="mrt-workflow-create-minutes" - - -gcloud auth application-default set-quota-project $PROJECT_ID -gcloud config set project $PROJECT_ID - - -gcloud workflows deploy $WORKFLOW_NAME \ - --source=main.yaml \ - --location=asia-northeast1 \ No newline at end of file diff --git a/workflows/workflow-create-minutes/main.yaml b/workflows/workflow-create-minutes/main.yaml deleted file mode 100755 index dedd621..0000000 --- a/workflows/workflow-create-minutes/main.yaml +++ /dev/null @@ -1,71 +0,0 @@ -main: - params: [input] - steps: - - initialize: - assign: - - project_id: ${sys.get_env("GOOGLE_CLOUD_PROJECT_ID")} - - create_hubspot_meeting_log_result: {} - - upload_minutes_to_drive_result: {} - - generate_meeting_minutes: - call: http.post - args: - url: ${"https://asia-northeast1-" + project_id + ".cloudfunctions.net/mrt-generate-meeting-minutes"} - body: - video: ${input.video} - auth: - type: OIDC - result: generate_meeting_minutes_result - - conditinal_switch: - switch: - - condition: ${generate_meeting_minutes_result.body.status != "end"} - steps: - - parallel_execute: - parallel: - shared: - [ - create_hubspot_meeting_log_result, - upload_minutes_to_drive_result, - ] - branches: - - create_hubspot_meeting_log_branch: - steps: - - create_hubspot_meeting_log: - call: http.post - args: - url: ${"https://asia-northeast1-" + project_id + ".cloudfunctions.net/mrt-create-hubspot-meeting-log"} - body: - title: ${generate_meeting_minutes_result.body.title} - host_id: ${generate_meeting_minutes_result.body.host_id} - starts_at: ${generate_meeting_minutes_result.body.starts_at} - ends_at: ${generate_meeting_minutes_result.body.ends_at} - minutes: ${generate_meeting_minutes_result.body.minutes} - auth: - type: OIDC - result: create_hubspot_meeting_log_result - - upload_minutes_to_drive_branch: - steps: - - upload-minutes-to-drive: - call: http.post - args: - url: ${"https://asia-northeast1-" + project_id + ".cloudfunctions.net/mrt-upload-minutes-to-drive"} - body: - file_name: ${generate_meeting_minutes_result.body.file_name} - minutes: ${generate_meeting_minutes_result.body.minutes} - auth: - type: OIDC - result: upload_minutes_to_drive_result - - append_log_to_sheet: - call: http.post - args: - url: ${"https://asia-northeast1-" + project_id + ".cloudfunctions.net/mrt-append-log-to-sheet"} - body: - title: ${generate_meeting_minutes_result.body.title} - host_name: ${generate_meeting_minutes_result.body.host_name} - video_url: ${generate_meeting_minutes_result.body.video_url} - starts_at: ${generate_meeting_minutes_result.body.starts_at} - matched_company_id: ${create_hubspot_meeting_log_result.body.matched_company_id} - matched_company_name: ${create_hubspot_meeting_log_result.body.matched_company_name} - document_id: ${upload_minutes_to_drive_result.body.document_id} - auth: - type: OIDC - result: append_log_to_sheet_result From c004f6c34f2b41e091603b4fda3b475e85b22b8f Mon Sep 17 00:00:00 2001 From: kosukesuenaga Date: Fri, 5 Dec 2025 16:01:59 +0900 Subject: [PATCH 3/6] add re-execute API error hundling --- functions/generate_minutes/serverConfig.ts 
| 3 - functions/generate_minutes/src/apiRouter.ts | 31 +++++++-- functions/generate_minutes/src/logics/file.ts | 44 +++++++------ .../src/logics/googleDrive.ts | 32 +++------ .../generate_minutes/src/logics/process.ts | 65 +++++++++---------- .../generate_minutes/src/stores/errorCodes.ts | 17 +++-- 6 files changed, 99 insertions(+), 93 deletions(-) diff --git a/functions/generate_minutes/serverConfig.ts b/functions/generate_minutes/serverConfig.ts index 0fb6426..248cef1 100644 --- a/functions/generate_minutes/serverConfig.ts +++ b/functions/generate_minutes/serverConfig.ts @@ -1,10 +1,7 @@ -import { join } from "path"; export const GEMINI_MODEL_ID = "gemini-2.5-flash"; export const DEBUG = true; -export const CREDENTIALS_PATH = join(__dirname, process.env.SEARVICE_ACCOUNT_CREDENTIALS_FILE || ''); - export const CLOUD_STORAGE_MASTER_FOLDER_NAME = "master"; export const CLOUD_STORAGE_LOG_FOLDER_NAME = "request_logs"; export const COMPANIES_FILE_NAME = "companies.json"; diff --git a/functions/generate_minutes/src/apiRouter.ts b/functions/generate_minutes/src/apiRouter.ts index ad06446..0c04172 100644 --- a/functions/generate_minutes/src/apiRouter.ts +++ b/functions/generate_minutes/src/apiRouter.ts @@ -3,7 +3,7 @@ import zlib from "zlib"; import { storageController } from "./logics/storage"; import { MiiTelWebhookSchema, processRequest } from "./logics/process"; import { hubspotController } from "./logics/hubspot"; -import { createCustomError } from "./logics/error"; +import { createCustomError, responseError } from "./logics/error"; import { CLOUD_STORAGE_LOG_FOLDER_NAME, CLOUD_STORAGE_MASTER_FOLDER_NAME, COMPANIES_FILE_NAME, OWNERS_FILE_NAME } from "../serverConfig"; const router = express.Router(); @@ -20,14 +20,15 @@ router.post("/miitel", async (req, res) => { await storageController.saveToGCS(CLOUD_STORAGE_LOG_FOLDER_NAME, `${videoInfo.id}.json.gz`, gzipped, 'application/gzip'); await processRequest(videoInfo); + // if(!result) throw res.status(200).send("ok"); } catch(err) { - res.status(400).send("Invalid webhook body"); + responseError(err, res) } }); -// Update Master Data And Check Google Drive Folder +// Refresh Master Data Everyday router.post("/dailyBatch", async (req, res) => { try { console.log("Starting daily batch process..."); @@ -41,7 +42,6 @@ router.post("/dailyBatch", async (req, res) => { if(!owners) throw createCustomError("GET_COMPANIES_FAILED"); await storageController.saveToGCS(CLOUD_STORAGE_MASTER_FOLDER_NAME, OWNERS_FILE_NAME, JSON.stringify(owners), 'application/json'); - // check folders in Google Drive res.status(200).send("Daily batch executed."); } catch (error) { @@ -61,6 +61,29 @@ router.post("/getLog", async (req, res) => { }); +// Check Log By Meeting ID +router.post("/reExecute", async (req, res) => { + try { + console.log(req.body); + const meetingId = req.body.meetingId; + const newTitle = req.body.newTitle; + const log = await storageController.loadFromGCS(CLOUD_STORAGE_LOG_FOLDER_NAME, `${meetingId}.json.gz`); + if(!log) throw Error(); + const params = MiiTelWebhookSchema.safeParse(JSON.parse(log)); + if(!params.success) throw createCustomError("ZOD_FAILED"); + params.data.video.title = newTitle; + // console.log(params.data.video) + + await processRequest(params.data.video); + + res.send(log); + } catch(error) { + console.log("===== Route Log =====") + console.log(error); + res.status(400).send("Failed"); + } +}); + // router.post("/deleteFile", async (req, res) => { // console.log(req.body); diff --git 
a/functions/generate_minutes/src/logics/file.ts b/functions/generate_minutes/src/logics/file.ts index 39411db..bced88b 100644 --- a/functions/generate_minutes/src/logics/file.ts +++ b/functions/generate_minutes/src/logics/file.ts @@ -24,30 +24,32 @@ export const fileController = { minutesContent += minutes; return minutesContent; }, - createZip: async (body: any, outputPath: string, fileName: string) => { - console.log(outputPath); - await new Promise((resolve, reject) => { - const output = fs.createWriteStream(outputPath); - const archive = archiver('zip', { - zlib: { level: 9 } - }); + createZip: async (body: any, outputPath: string, fileName: string): Promise => { + try { + await new Promise((resolve, reject) => { + const output = fs.createWriteStream(outputPath); + const archive = archiver('zip', { + zlib: { level: 9 } + }); - output.on('close', () => { - console.log(archive.pointer() + ' total bytes'); - console.log('archiver has been finalized and the output file descriptor has closed.'); - resolve(true); - }); + output.on('close', () => { + // console.log(archive.pointer() + ' total bytes'); + // console.log('archiver has been finalized and the output file descriptor has closed.'); + resolve(true); + }); - archive.on('error', (err) => { - reject(err); - }); + archive.on('error', (err) => { + reject(err); + }); - archive.pipe(output); - archive.append(JSON.stringify(body), { name: fileName + '.json' }); - archive.finalize(); - }) - console.log("ZIP created"); - return; + archive.pipe(output); + archive.append(JSON.stringify(body), { name: fileName + '.json' }); + archive.finalize(); + }) + return true; + } catch(error) { + return false; + } }, }; \ No newline at end of file diff --git a/functions/generate_minutes/src/logics/googleDrive.ts b/functions/generate_minutes/src/logics/googleDrive.ts index 50bd69c..a8ede92 100644 --- a/functions/generate_minutes/src/logics/googleDrive.ts +++ b/functions/generate_minutes/src/logics/googleDrive.ts @@ -1,8 +1,10 @@ import { docs_v1, drive_v3, google, sheets_v4 } from "googleapis"; import fs from "fs"; -import { CREDENTIALS_PATH, DEBUG, FOLDER_MIMETYPE, LOG_SHEET_HEADER_VALUES, SHEET_MIMETYPE } from "../../serverConfig"; +import { DEBUG, LOG_SHEET_HEADER_VALUES, SHEET_MIMETYPE } from "../../serverConfig"; import z from "zod"; +const GOOGLE_DRIVE_FOLDER_ID = process.env.GOOGLE_DRIVE_FOLDER_ID; + const SCOPES = ["https://www.googleapis.com/auth/drive", "https://www.googleapis.com/auth/drive.file"] const MAX_RETRY = 3; @@ -23,7 +25,6 @@ export const googleDriveController = { getAuth: async (): Promise => { try { const credentials = JSON.parse(process.env.SEARVICE_ACCOUNT_CREDENTIALS || "{}"); - console.log(credentials) const auth = await new google.auth.GoogleAuth({ credentials: credentials, scopes: SCOPES, @@ -49,20 +50,20 @@ export const googleDriveController = { return docs; }, - uploadFile: async (driveClient: drive_v3.Drive, filePath: string, folderId: string, fileName: string): Promise => { + uploadFile: async (driveClient: drive_v3.Drive, filePath: string, folderId: string, fileName: string, contentType: string): Promise => { try { - console.log("Uploading file to Google Drive:", filePath); + // console.log("Uploading file to Google Drive:", filePath); const response = await driveClient.files.create({ requestBody: { name: fileName, parents: [folderId], }, media: { - mimeType: "application/zip", + mimeType: contentType, body: fs.createReadStream(filePath), }, }); - console.log("File uploaded, Id:", response.data.id); + // console.log("File 
uploaded, Id:", response.data.id); fs.unlinkSync(filePath); return response.data.id; } catch (error) { @@ -71,25 +72,12 @@ export const googleDriveController = { return null; } }, - getFolderId: async (driveClient: drive_v3.Drive, folderId: string, fileName: string): Promise => { - try { - const existsFolderId = await googleDriveController.searchFileIdByFileName(driveClient, folderId, fileName); - if(existsFolderId) return existsFolderId; - console.log('=== Create New Folder ===') - const newFolderId = googleDriveController.createNewFile(driveClient, folderId, fileName, FOLDER_MIMETYPE); - if(!newFolderId) return null; - return newFolderId; - } catch (error) { - console.error('Error searching files:', error); - return null; - } - }, searchFileIdByFileName: async (driveClient: drive_v3.Drive, folderId: string, fileName: string): Promise => { try { const params = googleDriveController.getSearchFileParamsByDebugMode(folderId); const res = await driveClient.files.list(params); - console.log("Files:"); - console.log(res.data.files); + // console.log("Files:"); + // console.log(res.data.files); if(!res.data.files) return null; for(const file of res.data.files) { @@ -118,7 +106,7 @@ export const googleDriveController = { } return { corpora: 'drive', - driveId: process.env.GOOGLE_DRIVE_FOLDER_ID, + driveId: GOOGLE_DRIVE_FOLDER_ID, q: `'${folderId}' in parents`, pageSize: 10, fields: "files(id, name)", diff --git a/functions/generate_minutes/src/logics/process.ts b/functions/generate_minutes/src/logics/process.ts index 1b5764e..7aef059 100644 --- a/functions/generate_minutes/src/logics/process.ts +++ b/functions/generate_minutes/src/logics/process.ts @@ -5,9 +5,9 @@ import { googleDriveController, LogRowData, LogRowDataSchema } from "./googleDri import { fileController } from "./file"; import path, { join } from "path"; import fs from "fs"; -import { createCustomError, responseError } from "./error"; +import { createCustomError } from "./error"; import { storageController } from "./storage"; -import { CLOUD_STORAGE_MASTER_FOLDER_NAME, DATE_FORMAT, DATETIME_FORMAT, DOCUMENT_MIMETYPE, OWNERS_FILE_NAME, Y_FORMAT, YM_FORMAT } from "../../serverConfig"; +import { CLOUD_STORAGE_MASTER_FOLDER_NAME, DATE_FORMAT, DATETIME_FORMAT, DOCUMENT_MIMETYPE, OWNERS_FILE_NAME, YM_FORMAT } from "../../serverConfig"; import { hubspotController, OwnerSchema } from "./hubspot"; import { fuzzyMatchController } from "./fuzzyMatch"; @@ -32,8 +32,6 @@ export const MiiTelWebhookSchema = z.object({ video: VideoInfoSchema, }); -// export type MiiTelWebhook = z.infer; - const GOOGLE_DRIVE_FOLDER_ID = process.env.GOOGLE_DRIVE_FOLDER_ID || ''; const MIITEL_REQUEST_LOG_FOLDER_ID = process.env.MIITEL_REQUEST_LOG_FOLDER_ID || ''; const MINUTES_CREATION_HISTORY_FOLDER_ID = process.env.MINUTES_CREATION_HISTORY_FOLDER_ID || ''; @@ -43,6 +41,8 @@ const HUBSPOT_COMPANY_URL = process.env.HUBSPOT_COMPANY_URL || ''; const FILE_PATH = join(__dirname, "../files/"); +let outputPath = ''; + export const processRequest = async (videoInfo: VideoInfo) => { try { const videoId = videoInfo.id; @@ -50,36 +50,35 @@ export const processRequest = async (videoInfo: VideoInfo) => { const startsAt = videoInfo.starts_at; const endsAt = videoInfo.ends_at; const accessPermission = videoInfo.access_permission; - const hostId = videoInfo.host.login_id; const hostName = videoInfo.host.user_name; - const speechRecognition = videoInfo.speech_recognition.raw; + if (accessPermission !== "EVERYONE" || !title.includes("様") || title.includes("社内")) return; + + // ===== 
Init ===== + const googleAuth = await googleDriveController.getAuth(); + const driveClient = googleDriveController.getDriveClient(googleAuth); + const docsClient = googleDriveController.getDocsClient(googleAuth); + const sheetsClient = googleDriveController.getSheetsClient(googleAuth); + const jstStartsAt = dateController.convertToJst(startsAt); const jstEndsAt = dateController.convertToJst(endsAt); const fileName = fileController.createMinutesFileName(title, hostName, jstStartsAt); const videoUrl = `${MIITEL_URL}app/video/${videoId}`; - if (accessPermission !== "EVERYONE" || !title.includes("様") || title.includes("社内")) return; - - // - const googleAuth = await googleDriveController.getAuth(); - const driveClient = googleDriveController.getDriveClient(googleAuth); - const docsClient = googleDriveController.getDocsClient(googleAuth); - const sheetsClient = googleDriveController.getSheetsClient(googleAuth); // ===== Save Request Log to Google Drive ===== if (!fs.existsSync(FILE_PATH)) fs.mkdirSync(FILE_PATH, { recursive: true }); - const outputPath = path.join(FILE_PATH, fileName + '.zip'); - await fileController.createZip(videoInfo, outputPath, fileName); - - const logFileId = await googleDriveController.uploadFile(driveClient, outputPath, MIITEL_REQUEST_LOG_FOLDER_ID, fileName + '.zip'); + outputPath = path.join(FILE_PATH, fileName + '.zip'); + const createZip = await fileController.createZip(videoInfo, outputPath, fileName); + if(!createZip) throw createCustomError("CREATE_ZIP_FILE_FAILED"); + + const logFileId = await googleDriveController.uploadFile(driveClient, outputPath, MIITEL_REQUEST_LOG_FOLDER_ID, `${fileName}.zip`, "application/zip"); if(!logFileId) throw createCustomError("UPLOAD_LOG_FAILED"); // ===== Generate Minutes ===== const minutes = await aiController.generateMinutes(speechRecognition); - console.log(minutes); if (!minutes) throw createCustomError("AI_GENERATION_FAILED"); let content = `会議履歴URL:${videoUrl}\n`; content += `担当者:${hostName}\n\n`; @@ -87,11 +86,12 @@ export const processRequest = async (videoInfo: VideoInfo) => { // ===== Upload To Google Drive ===== - const documentId = await googleDriveController.createNewFile(driveClient, GOOGLE_DRIVE_FOLDER_ID, title, DOCUMENT_MIMETYPE); - if (!documentId) throw createCustomError("UPLOAD_MINUTES_FAILED"); - const result = await googleDriveController.addContentToDocs(docsClient, documentId, minutes); + const documentId = await googleDriveController.createNewFile(driveClient, GOOGLE_DRIVE_FOLDER_ID, fileName, DOCUMENT_MIMETYPE); + if (!documentId) throw createCustomError("CREATE_NEW_DOCUMENT_FAILED"); + const result = await googleDriveController.addContentToDocs(docsClient, documentId, content); if(!result) throw createCustomError("UPLOAD_MINUTES_FAILED"); + // ===== Create Meeting Log at Hubspot ===== const ownersJson = await storageController.loadJsonFromGCS(CLOUD_STORAGE_MASTER_FOLDER_NAME, OWNERS_FILE_NAME); if(!ownersJson) throw createCustomError("GET_OWNERS_FAILED"); @@ -99,18 +99,14 @@ export const processRequest = async (videoInfo: VideoInfo) => { if(!parsedOwners.success) throw createCustomError("ZOD_FAILED"); const ownerId = hubspotController.searchOwnerIdByEmail(hostId, parsedOwners.data); - - const companyName = fileController.extractCompanyNameFromTitle(title); - const matchedCompany = await fuzzyMatchController.searchMatchedCompany(companyName); + const extractedCompanyName = fileController.extractCompanyNameFromTitle(title); + const matchedCompany = await 
fuzzyMatchController.searchMatchedCompany(extractedCompanyName); if(matchedCompany) await hubspotController.createMeetingLog(matchedCompany.id, title, ownerId, minutes, startsAt, endsAt); - // ===== Apeend Log To SpreadSheet ===== - const currentYear = dateController.getCurrentJstTime(Y_FORMAT); - const yearFileId = await googleDriveController.getFolderId(driveClient, MINUTES_CREATION_HISTORY_FOLDER_ID, currentYear); - if(!yearFileId) throw createCustomError("GET_FOLDER_ID_FAILED"); + // ===== Apeend Log To SpreadSheet ===== const currentYearMonth = dateController.getCurrentJstTime(YM_FORMAT); - const sheetId = await googleDriveController.getLogSheetId(driveClient, sheetsClient, yearFileId, currentYearMonth); + const sheetId = await googleDriveController.getLogSheetId(driveClient, sheetsClient, MINUTES_CREATION_HISTORY_FOLDER_ID, currentYearMonth); if(!sheetId) throw createCustomError("GET_SHEET_ID_FAILED"); const currentJstDateTimeStr = dateController.getCurrentJstTime(DATETIME_FORMAT); @@ -125,10 +121,11 @@ export const processRequest = async (videoInfo: VideoInfo) => { documentUrl: `https://docs.google.com/document/d/${documentId}/edit`, hubspotUrl: matchedCompany ? `${HUBSPOT_COMPANY_URL}/${matchedCompany.id}` : '', }); - await googleDriveController.insertRowToSheet(sheetsClient, sheetId, Object.values(rowData)); - return; - } catch (error) { - responseError(error); - return; + const insertResult = await googleDriveController.insertRowToSheet(sheetsClient, sheetId, Object.values(rowData)); + if(!insertResult) throw createCustomError("INSERT_ROW_FAILED"); + fs.unlinkSync(outputPath); + } catch (error) { + fs.unlinkSync(outputPath); + throw error; } }; \ No newline at end of file diff --git a/functions/generate_minutes/src/stores/errorCodes.ts b/functions/generate_minutes/src/stores/errorCodes.ts index 3533169..3ae530f 100644 --- a/functions/generate_minutes/src/stores/errorCodes.ts +++ b/functions/generate_minutes/src/stores/errorCodes.ts @@ -9,18 +9,17 @@ export const ERROR_DEFINITIONS = { AI_GENERATION_FAILED: { code: "E2001", message: "AIによる議事録生成に失敗しました", statusCode: 500 }, // 議事録(Google Docs)の作成/アップロード失敗 - UPLOAD_MINUTES_FAILED: { code: "E3002", message: "議事録のアップロードに失敗しました", statusCode: 500 }, + CREATE_NEW_DOCUMENT_FAILED: { code: "E3002", message: "ドキュメント作成に失敗しました", statusCode: 500 }, + UPLOAD_MINUTES_FAILED: { code: "E3003", message: "議事録のアップロードに失敗しました", statusCode: 500 }, // オーナー情報の取得失敗 - GET_OWNERS_FAILED: { code: "E3003", message: "オーナー情報の取得に失敗しました", statusCode: 500 }, - GET_COMPANIES_FAILED: { code: "E3004", message: "会社情報の取得に失敗しました", statusCode: 500 }, + GET_OWNERS_FAILED: { code: "E3004", message: "オーナー情報の取得に失敗しました", statusCode: 500 }, + GET_COMPANIES_FAILED: { code: "E3005", message: "会社情報の取得に失敗しました", statusCode: 500 }, - // 議事録作成履歴スプレッドシートの取得失敗 - GET_MINUTES_HISTORY_FAILED: { code: "E3005", message: "議事録作成履歴の取得に失敗しました", statusCode: 500 }, - - - GET_FOLDER_ID_FAILED: { code: "E3006", message: "フォルダID取得に失敗しました", statusCode: 500 }, - GET_SHEET_ID_FAILED: { code: "E3007", message: "スプレッドシートID取得に失敗しました", statusCode: 500 }, + GET_FOLDER_ID_FAILED: { code: "E3007", message: "フォルダID取得に失敗しました", statusCode: 500 }, + GET_SHEET_ID_FAILED: { code: "E3008", message: "スプレッドシートID取得に失敗しました", statusCode: 500 }, + CREATE_ZIP_FILE_FAILED: { code: "E3009", message: "ZIPファイルの作成に失敗しました", statusCode: 500 }, + INSERT_ROW_FAILED: { code: "E3009", message: "シートへのデータ追加に失敗しました", statusCode: 500 }, } as const; export type ErrorKey = keyof typeof ERROR_DEFINITIONS; From 
bb072cc91c404f4f2bf3f798b84c5541dcd7191a Mon Sep 17 00:00:00 2001 From: kosukesuenaga Date: Mon, 8 Dec 2025 14:22:40 +0900 Subject: [PATCH 4/6] api call retry --- functions/generate_minutes/package.json | 1 + functions/generate_minutes/serverConfig.ts | 7 ++- functions/generate_minutes/src/apiRouter.ts | 26 +++++++- functions/generate_minutes/src/logics/ai.ts | 3 +- .../generate_minutes/src/logics/error.ts | 17 ++++++ functions/generate_minutes/src/logics/file.ts | 3 - .../generate_minutes/src/logics/fuzzyMatch.ts | 3 +- .../src/logics/googleDrive.ts | 8 +-- .../generate_minutes/src/logics/process.ts | 61 ++++++++++++++++--- .../generate_minutes/src/logics/storage.ts | 17 ++++++ .../generate_minutes/src/stores/errorCodes.ts | 3 + 11 files changed, 126 insertions(+), 23 deletions(-) diff --git a/functions/generate_minutes/package.json b/functions/generate_minutes/package.json index 09b0bba..beeb1e8 100644 --- a/functions/generate_minutes/package.json +++ b/functions/generate_minutes/package.json @@ -23,6 +23,7 @@ "@google/genai": "^1.30.0", "@hubspot/api-client": "^13.4.0", "archiver": "^7.0.1", + "cerceis-lib": "^2.5.0", "concurrently": "^9.2.1", "dotenv": "^17.2.3", "express": "^4.21.2", diff --git a/functions/generate_minutes/serverConfig.ts b/functions/generate_minutes/serverConfig.ts index 248cef1..f547d2f 100644 --- a/functions/generate_minutes/serverConfig.ts +++ b/functions/generate_minutes/serverConfig.ts @@ -3,7 +3,7 @@ export const GEMINI_MODEL_ID = "gemini-2.5-flash"; export const DEBUG = true; export const CLOUD_STORAGE_MASTER_FOLDER_NAME = "master"; -export const CLOUD_STORAGE_LOG_FOLDER_NAME = "request_logs"; +export const CLOUD_STORAGE_LOG_FOLDER_NAME = "new_request_log"; export const COMPANIES_FILE_NAME = "companies.json"; export const OWNERS_FILE_NAME = "owners.json"; @@ -18,4 +18,7 @@ export const FOLDER_MIMETYPE = 'application/vnd.google-apps.folder'; export const DOCUMENT_MIMETYPE = 'application/vnd.google-apps.document'; export const SHEET_MIMETYPE = 'application/vnd.google-apps.spreadsheet'; -export const LOG_SHEET_HEADER_VALUES = ["タイムスタンプ","商談日", "タイトル", "登録先企業","担当者", "ミーティングURL", "議事録URL", "HubSpot会社概要URL"] \ No newline at end of file +export const LOG_SHEET_HEADER_VALUES = ["タイムスタンプ","商談日", "タイトル", "登録先企業","担当者", "ミーティングURL", "議事録URL", "HubSpot会社概要URL"] + +export const MAX_RETRY_COUNT = 3; +export const ROOP_DELAY_MS = 5000; \ No newline at end of file diff --git a/functions/generate_minutes/src/apiRouter.ts b/functions/generate_minutes/src/apiRouter.ts index 0c04172..6077066 100644 --- a/functions/generate_minutes/src/apiRouter.ts +++ b/functions/generate_minutes/src/apiRouter.ts @@ -5,6 +5,7 @@ import { MiiTelWebhookSchema, processRequest } from "./logics/process"; import { hubspotController } from "./logics/hubspot"; import { createCustomError, responseError } from "./logics/error"; import { CLOUD_STORAGE_LOG_FOLDER_NAME, CLOUD_STORAGE_MASTER_FOLDER_NAME, COMPANIES_FILE_NAME, OWNERS_FILE_NAME } from "../serverConfig"; +import { Delay } from "cerceis-lib"; const router = express.Router(); @@ -78,13 +79,36 @@ router.post("/reExecute", async (req, res) => { res.send(log); } catch(error) { - console.log("===== Route Log =====") console.log(error); res.status(400).send("Failed"); } }); +// 過去のログを全てGoogle Driveへアップロード +router.post("/logUpload", async (req, res) => { + try { + const list = await storageController.getFileList(); + if(!list) throw createCustomError("GET_FILES_FAILED"); + for(const l of list){ + console.log(l); + const fileName = l.split('/')[1] + const 
log = await storageController.loadFromGCS('request_log', fileName); + if(!log) throw createCustomError("GET_FILES_FAILED"); + // console.log(log); + const parsedLog = MiiTelWebhookSchema.safeParse(JSON.parse(log)); + if(!parsedLog.success) throw createCustomError("ZOD_FAILED"); + console.log(parsedLog.data.video.title); + + await Delay(500); + } + res.send('ok'); + } catch(error) { + console.log(error); + res.status(400).send("Failed"); + } +}); + // router.post("/deleteFile", async (req, res) => { // console.log(req.body); // const fileId = req.body.fileId; diff --git a/functions/generate_minutes/src/logics/ai.ts b/functions/generate_minutes/src/logics/ai.ts index ee37ec6..5ba41ad 100644 --- a/functions/generate_minutes/src/logics/ai.ts +++ b/functions/generate_minutes/src/logics/ai.ts @@ -6,7 +6,7 @@ const aiClient = new GoogleGenAI({ }); export const aiController = { - generateMinutes: async(text: string) => { + generateMinutes: async(text: string): Promise => { const prompt = ` あなたは議事録作成のプロフェッショナルです。以下の「文字起こし結果」は営業マンが録音した商談の文字起こしです。以下の制約条件に従い、最高の商談報告の議事録を作成してください。 @@ -28,6 +28,7 @@ export const aiController = { model: process.env.GEMINI_MODEL_ID || "gemini-2.5-flash", contents: prompt, }) + if(!response.text) return null; console.log("AI Response:", response.text); return response.text; } catch (error) { diff --git a/functions/generate_minutes/src/logics/error.ts b/functions/generate_minutes/src/logics/error.ts index 0a742b2..0983399 100644 --- a/functions/generate_minutes/src/logics/error.ts +++ b/functions/generate_minutes/src/logics/error.ts @@ -1,6 +1,8 @@ import { Response } from "express"; import z from "zod"; import { ERROR_DEFINITIONS, ErrorKey } from "../stores/errorCodes"; +import { Delay } from "cerceis-lib"; +import { MAX_RETRY_COUNT, ROOP_DELAY_MS } from "../../serverConfig"; const CustomErrorSchema = z.object({ code: z.string(), @@ -27,3 +29,18 @@ export const responseError = (error: any, res: Response | null = null) => { if(res) return res.status(parsedError.statusCode).send(parsedError.message); } + +export const callFunctionWithRetry = async (fn: () => Promise): Promise => { + for(let retryCount = 0; retryCount <= MAX_RETRY_COUNT; retryCount++) { + try { + const result = await fn(); + if(!result) throw Error(); + return result; + } catch(error) { + if(retryCount === MAX_RETRY_COUNT) return null; + console.warn(`\n\n========== リトライ${retryCount + 1}回目 ==========\n\n`); + await Delay(ROOP_DELAY_MS); + } + } + return null; +}; \ No newline at end of file diff --git a/functions/generate_minutes/src/logics/file.ts b/functions/generate_minutes/src/logics/file.ts index bced88b..42837f0 100644 --- a/functions/generate_minutes/src/logics/file.ts +++ b/functions/generate_minutes/src/logics/file.ts @@ -1,8 +1,5 @@ -import { create } from "domain"; import { dateController } from "./date"; -import path, { join } from "path"; import archiver from "archiver"; -import { googleDriveController } from "./googleDrive"; import fs from "fs"; diff --git a/functions/generate_minutes/src/logics/fuzzyMatch.ts b/functions/generate_minutes/src/logics/fuzzyMatch.ts index 211b172..3d90584 100644 --- a/functions/generate_minutes/src/logics/fuzzyMatch.ts +++ b/functions/generate_minutes/src/logics/fuzzyMatch.ts @@ -3,12 +3,13 @@ import { storageController } from "./storage"; import { CLOUD_STORAGE_MASTER_FOLDER_NAME, COMPANIES_FILE_NAME, LEGAL_SUFFIX } from "../../serverConfig"; import { Company, CompanySchema } from "./hubspot"; import z from "zod"; +import { callFunctionWithRetry } from "./error"; 
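// --- Minimal usage sketch of the callFunctionWithRetry helper added in error.ts above (illustration only,
// not part of this patch). Assuming the helper is generically typed as (fn: () => Promise<T | null>) => Promise<T | null>,
// a wrapped callback signals a failed attempt either by throwing or by resolving to a falsy value; the helper
// then waits ROOP_DELAY_MS and tries again, returning null once MAX_RETRY_COUNT retries are exhausted.
// fetchTranscriptText is a hypothetical helper name used only for this example.
import { callFunctionWithRetry } from "./error";

const fetchTranscriptText = async (url: string): Promise<string | null> => {
  const res = await fetch(url);
  if (!res.ok) return null; // falsy result => counted as a failed attempt and retried
  return res.text();
};

const loadTranscriptWithRetry = async (url: string): Promise<string | null> => {
  const text = await callFunctionWithRetry(() => fetchTranscriptText(url));
  // null here means every attempt failed; callers such as process.ts convert this into a custom error.
  return text ?? null;
};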
export const fuzzyMatchController = { searchMatchedCompany: async(companyName: string): Promise => { try { - const companiesJson = await storageController.loadJsonFromGCS(CLOUD_STORAGE_MASTER_FOLDER_NAME, COMPANIES_FILE_NAME); + const companiesJson = await callFunctionWithRetry(() => storageController.loadJsonFromGCS(CLOUD_STORAGE_MASTER_FOLDER_NAME, COMPANIES_FILE_NAME)); if(!companiesJson) return null; const parsedCompanies = z.array(CompanySchema).safeParse(JSON.parse(companiesJson)); if(!parsedCompanies.success) return null; diff --git a/functions/generate_minutes/src/logics/googleDrive.ts b/functions/generate_minutes/src/logics/googleDrive.ts index a8ede92..509b3bd 100644 --- a/functions/generate_minutes/src/logics/googleDrive.ts +++ b/functions/generate_minutes/src/logics/googleDrive.ts @@ -50,7 +50,7 @@ export const googleDriveController = { return docs; }, - uploadFile: async (driveClient: drive_v3.Drive, filePath: string, folderId: string, fileName: string, contentType: string): Promise => { + uploadFile: async (driveClient: drive_v3.Drive, filePath: string, folderId: string, fileName: string, contentType: string): Promise => { try { // console.log("Uploading file to Google Drive:", filePath); const response = await driveClient.files.create({ @@ -63,12 +63,10 @@ export const googleDriveController = { body: fs.createReadStream(filePath), }, }); - // console.log("File uploaded, Id:", response.data.id); - fs.unlinkSync(filePath); + if(!response.data.id) return null; return response.data.id; } catch (error) { console.error("Error uploading file:", error); - fs.unlinkSync(filePath); return null; } }, @@ -179,7 +177,7 @@ export const googleDriveController = { try { const existsSheetId = await googleDriveController.searchFileIdByFileName(driveClient, folderId, fileName); if(existsSheetId) return existsSheetId; - console.log('=== Create New Sheet ===') + // console.log('=== Create New Sheet ===') const newSheetId = await googleDriveController.createNewFile(driveClient, folderId, fileName, SHEET_MIMETYPE); if(!newSheetId) return null; // diff --git a/functions/generate_minutes/src/logics/process.ts b/functions/generate_minutes/src/logics/process.ts index 7aef059..0e56103 100644 --- a/functions/generate_minutes/src/logics/process.ts +++ b/functions/generate_minutes/src/logics/process.ts @@ -5,7 +5,7 @@ import { googleDriveController, LogRowData, LogRowDataSchema } from "./googleDri import { fileController } from "./file"; import path, { join } from "path"; import fs from "fs"; -import { createCustomError } from "./error"; +import { callFunctionWithRetry, createCustomError } from "./error"; import { storageController } from "./storage"; import { CLOUD_STORAGE_MASTER_FOLDER_NAME, DATE_FORMAT, DATETIME_FORMAT, DOCUMENT_MIMETYPE, OWNERS_FILE_NAME, YM_FORMAT } from "../../serverConfig"; import { hubspotController, OwnerSchema } from "./hubspot"; @@ -74,11 +74,11 @@ export const processRequest = async (videoInfo: VideoInfo) => { const createZip = await fileController.createZip(videoInfo, outputPath, fileName); if(!createZip) throw createCustomError("CREATE_ZIP_FILE_FAILED"); - const logFileId = await googleDriveController.uploadFile(driveClient, outputPath, MIITEL_REQUEST_LOG_FOLDER_ID, `${fileName}.zip`, "application/zip"); + const logFileId = await callFunctionWithRetry(() => googleDriveController.uploadFile(driveClient, outputPath, MIITEL_REQUEST_LOG_FOLDER_ID, `${fileName}.zip`, "application/zip")); if(!logFileId) throw createCustomError("UPLOAD_LOG_FAILED"); // ===== Generate Minutes ===== - 
const minutes = await aiController.generateMinutes(speechRecognition); + const minutes = await callFunctionWithRetry(() => aiController.generateMinutes(speechRecognition)); if (!minutes) throw createCustomError("AI_GENERATION_FAILED"); let content = `会議履歴URL:${videoUrl}\n`; content += `担当者:${hostName}\n\n`; @@ -86,14 +86,14 @@ export const processRequest = async (videoInfo: VideoInfo) => { // ===== Upload To Google Drive ===== - const documentId = await googleDriveController.createNewFile(driveClient, GOOGLE_DRIVE_FOLDER_ID, fileName, DOCUMENT_MIMETYPE); + const documentId = await callFunctionWithRetry(() => googleDriveController.createNewFile(driveClient, GOOGLE_DRIVE_FOLDER_ID, fileName, DOCUMENT_MIMETYPE)); if (!documentId) throw createCustomError("CREATE_NEW_DOCUMENT_FAILED"); - const result = await googleDriveController.addContentToDocs(docsClient, documentId, content); - if(!result) throw createCustomError("UPLOAD_MINUTES_FAILED"); + const addContentResult = await callFunctionWithRetry(() => googleDriveController.addContentToDocs(docsClient, documentId, content)); + if(!addContentResult) throw createCustomError("UPLOAD_MINUTES_FAILED"); // ===== Create Meeting Log at Hubspot ===== - const ownersJson = await storageController.loadJsonFromGCS(CLOUD_STORAGE_MASTER_FOLDER_NAME, OWNERS_FILE_NAME); + const ownersJson = await callFunctionWithRetry(() => storageController.loadJsonFromGCS(CLOUD_STORAGE_MASTER_FOLDER_NAME, OWNERS_FILE_NAME)); if(!ownersJson) throw createCustomError("GET_OWNERS_FAILED"); const parsedOwners = z.array(OwnerSchema).safeParse(JSON.parse(ownersJson)); if(!parsedOwners.success) throw createCustomError("ZOD_FAILED"); @@ -101,12 +101,15 @@ export const processRequest = async (videoInfo: VideoInfo) => { const extractedCompanyName = fileController.extractCompanyNameFromTitle(title); const matchedCompany = await fuzzyMatchController.searchMatchedCompany(extractedCompanyName); - if(matchedCompany) await hubspotController.createMeetingLog(matchedCompany.id, title, ownerId, minutes, startsAt, endsAt); + if(matchedCompany) { + const createLogResult = await callFunctionWithRetry(() => hubspotController.createMeetingLog(matchedCompany.id, title, ownerId, minutes, startsAt, endsAt)); + if(!createLogResult) throw createCustomError("CREATE_MEETING_LOG_FAILED"); + } // ===== Apeend Log To SpreadSheet ===== const currentYearMonth = dateController.getCurrentJstTime(YM_FORMAT); - const sheetId = await googleDriveController.getLogSheetId(driveClient, sheetsClient, MINUTES_CREATION_HISTORY_FOLDER_ID, currentYearMonth); + const sheetId = await callFunctionWithRetry(() => googleDriveController.getLogSheetId(driveClient, sheetsClient, MINUTES_CREATION_HISTORY_FOLDER_ID, currentYearMonth)); if(!sheetId) throw createCustomError("GET_SHEET_ID_FAILED"); const currentJstDateTimeStr = dateController.getCurrentJstTime(DATETIME_FORMAT); @@ -121,11 +124,49 @@ export const processRequest = async (videoInfo: VideoInfo) => { documentUrl: `https://docs.google.com/document/d/${documentId}/edit`, hubspotUrl: matchedCompany ? 
`${HUBSPOT_COMPANY_URL}/${matchedCompany.id}` : '', }); - const insertResult = await googleDriveController.insertRowToSheet(sheetsClient, sheetId, Object.values(rowData)); + const insertResult = await callFunctionWithRetry(() => googleDriveController.insertRowToSheet(sheetsClient, sheetId, Object.values(rowData))); if(!insertResult) throw createCustomError("INSERT_ROW_FAILED"); fs.unlinkSync(outputPath); } catch (error) { fs.unlinkSync(outputPath); throw error; } +}; + +export const logUploadProcess = async (videoInfo: VideoInfo) => { + try { + const videoId = videoInfo.id; + const title = videoInfo.title; + const startsAt = videoInfo.starts_at; + const endsAt = videoInfo.ends_at; + const accessPermission = videoInfo.access_permission; + const hostId = videoInfo.host.login_id; + const hostName = videoInfo.host.user_name; + const speechRecognition = videoInfo.speech_recognition.raw; + + if (accessPermission !== "EVERYONE" || !title.includes("様") || title.includes("社内")) return; + + // ===== Init ===== + const googleAuth = await googleDriveController.getAuth(); + const driveClient = googleDriveController.getDriveClient(googleAuth); + const docsClient = googleDriveController.getDocsClient(googleAuth); + const sheetsClient = googleDriveController.getSheetsClient(googleAuth); + + const jstStartsAt = dateController.convertToJst(startsAt); + const jstEndsAt = dateController.convertToJst(endsAt); + const fileName = fileController.createMinutesFileName(title, hostName, jstStartsAt); + const videoUrl = `${MIITEL_URL}app/video/${videoId}`; + + + // ===== Save Request Log to Google Drive ===== + if (!fs.existsSync(FILE_PATH)) fs.mkdirSync(FILE_PATH, { recursive: true }); + outputPath = path.join(FILE_PATH, fileName + '.zip'); + const createZip = await fileController.createZip(videoInfo, outputPath, fileName); + if(!createZip) throw createCustomError("CREATE_ZIP_FILE_FAILED"); + + const logFileId = await callFunctionWithRetry(() => googleDriveController.uploadFile(driveClient, outputPath, MIITEL_REQUEST_LOG_FOLDER_ID, `${fileName}.zip`, "application/zip")); + if(!logFileId) throw createCustomError("UPLOAD_LOG_FAILED"); + } catch(error) { + throw error; + } }; \ No newline at end of file diff --git a/functions/generate_minutes/src/logics/storage.ts b/functions/generate_minutes/src/logics/storage.ts index 8275145..f6ea454 100644 --- a/functions/generate_minutes/src/logics/storage.ts +++ b/functions/generate_minutes/src/logics/storage.ts @@ -1,5 +1,7 @@ import { Storage } from "@google-cloud/storage"; +import { Files } from "@google/genai"; import zlib from "zlib"; +import { CLOUD_STORAGE_LOG_FOLDER_NAME } from "../../serverConfig"; const csClient = new Storage({projectId: process.env.PROJECT_ID}); const BUCKET_NAME = process.env.CLOUD_STORAGE_BUCKET_NAME || ''; @@ -42,4 +44,19 @@ export const storageController = { return false; } }, + getFileList: async(): Promise => { + try { + const files = await bucket.getFiles({ + prefix: 'request_log/', + }); + const list = []; + for(const f of files[0]) { + // console.log(f.name) + list.push(f.name); + } + return list; + } catch(error) { + return null; + } + } }; diff --git a/functions/generate_minutes/src/stores/errorCodes.ts b/functions/generate_minutes/src/stores/errorCodes.ts index 3ae530f..de2c313 100644 --- a/functions/generate_minutes/src/stores/errorCodes.ts +++ b/functions/generate_minutes/src/stores/errorCodes.ts @@ -20,6 +20,9 @@ export const ERROR_DEFINITIONS = { GET_SHEET_ID_FAILED: { code: "E3008", message: "スプレッドシートID取得に失敗しました", statusCode: 500 }, 
CREATE_ZIP_FILE_FAILED: { code: "E3009", message: "ZIPファイルの作成に失敗しました", statusCode: 500 }, INSERT_ROW_FAILED: { code: "E3009", message: "シートへのデータ追加に失敗しました", statusCode: 500 }, + + GET_FILES_FAILED: { code: "E3010", message: "ファイルの取得に失敗しました", statusCode: 500 }, + CREATE_MEETING_LOG_FAILED: { code: "E3011", message: "ミーティングログ作成に失敗しました", statusCode: 500 }, } as const; export type ErrorKey = keyof typeof ERROR_DEFINITIONS; From 1259ba76c9106d5e7684537caf58d801e1284e2f Mon Sep 17 00:00:00 2001 From: kosukesuenaga Date: Tue, 9 Dec 2025 14:57:17 +0900 Subject: [PATCH 5/6] =?UTF-8?q?=E3=83=86=E3=82=B9=E3=83=88=E7=92=B0?= =?UTF-8?q?=E5=A2=83=E3=83=87=E3=83=97=E3=83=AD=E3=82=A4?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- api-gateway/create_api_dev.sh | 2 +- api-gateway/deploy_dev.sh | 6 +- api-gateway/openapi_dev.yaml | 191 +++++++++++++++++- functions/generate_minutes/.gcloudignore | 24 +++ .../generate_minutes/deploy_function_dev.sh | 32 +++ functions/generate_minutes/src/apiRouter.ts | 42 ++-- 6 files changed, 267 insertions(+), 30 deletions(-) create mode 100644 functions/generate_minutes/.gcloudignore create mode 100644 functions/generate_minutes/deploy_function_dev.sh diff --git a/api-gateway/create_api_dev.sh b/api-gateway/create_api_dev.sh index 1e97276..0ec87ab 100755 --- a/api-gateway/create_api_dev.sh +++ b/api-gateway/create_api_dev.sh @@ -7,7 +7,7 @@ PROJECT_ID="datacom-poc" API_NAME="sales-tool-api" # プロジェクトを設定 -gcloud auth application-default set-quota-project dmiru-dev +gcloud auth application-default set-quota-project $PROJECT_ID gcloud config set project $PROJECT_ID # API Gatewayを作成 diff --git a/api-gateway/deploy_dev.sh b/api-gateway/deploy_dev.sh index b7ad6a2..37f45f7 100755 --- a/api-gateway/deploy_dev.sh +++ b/api-gateway/deploy_dev.sh @@ -2,11 +2,11 @@ # 環境変数 API_NAME="sales-tool-api" -API_CONFIG_NAME="sales-tool-gw-dev-conf-20250619" -GATEWAY_NAME="sales-tool-gw-dev" +API_CONFIG_NAME="sales-tool-gw-dev-conf-20251209" +GATEWAY_NAME="sales-tool-gw-dev-20251209" OPENAPI_SPEC="openapi_dev.yaml" PROJECT_ID="datacom-poc" -SERVICE_ACCOUNT="api-gateway-mpos@datacom-poc.iam.gserviceaccount.com" +SERVICE_ACCOUNT="mrt-apigateway-sa-devtest@datacom-poc.iam.gserviceaccount.com" LOCATION="asia-northeast1" gcloud auth application-default set-quota-project $PROJECT_ID diff --git a/api-gateway/openapi_dev.yaml b/api-gateway/openapi_dev.yaml index ad2a432..6fa1524 100755 --- a/api-gateway/openapi_dev.yaml +++ b/api-gateway/openapi_dev.yaml @@ -6,13 +6,191 @@ info: schemes: - 'https' paths: - /trigger-minutes-workflow-from-miitel: + /miitel: post: - description: 'ワークフロー呼び出し処理' - operationId: 'trigger-minutes-workflow-from-miitel' + description: 'Miitel Webhook Processer' + operationId: 'miitel' x-google-backend: - address: https://asia-northeast1-datacom-poc.cloudfunctions.net/mrt-trigger-minutes-workflow-from-miitel + address: https://asia-northeast1-datacom-poc.cloudfunctions.net/generate_minutes/api/miitel path_translation: CONSTANT_ADDRESS + jwt_audience: https://asia-northeast1-datacom-poc.cloudfunctions.net/generate_minutes + deadline: 600 + consumes: + - application/json + produces: + - application/json + parameters: + - in: body + name: body + description: JSON payload + required: false + schema: + type: object + additionalProperties: true + responses: + 200: + description: 'OK' + schema: + type: object + additionalProperties: true + 401: + description: 'Auth Error' + schema: + type: object + properties: + error: + type: string + 500: 
+ description: 'Error' + schema: + type: object + properties: + error: + type: string + security: + - APIKeyHeader: [] + + /dailyBatch: + post: + description: 'get companies and owners' + operationId: 'dailyBatch' + x-google-backend: + address: https://asia-northeast1-datacom-poc.cloudfunctions.net/generate_minutes/api/dailyBatch + path_translation: CONSTANT_ADDRESS + jwt_audience: https://asia-northeast1-datacom-poc.cloudfunctions.net/generate_minutes + deadline: 600 + consumes: + - application/json + produces: + - application/json + parameters: + - in: body + name: body + description: JSON payload + required: false + schema: + type: object + additionalProperties: true + responses: + 200: + description: 'OK' + schema: + type: object + additionalProperties: true + 401: + description: 'Auth Error' + schema: + type: object + properties: + error: + type: string + 500: + description: 'Error' + schema: + type: object + properties: + error: + type: string + security: + - APIKeyHeader: [] + + /getLog: + post: + description: 'get log' + operationId: 'getLog' + x-google-backend: + address: https://asia-northeast1-datacom-poc.cloudfunctions.net/generate_minutes/api/getLog + path_translation: CONSTANT_ADDRESS + jwt_audience: https://asia-northeast1-datacom-poc.cloudfunctions.net/generate_minutes + deadline: 600 + consumes: + - application/json + produces: + - application/json + parameters: + - in: body + name: body + description: JSON payload + required: false + schema: + type: object + additionalProperties: true + responses: + 200: + description: 'OK' + schema: + type: object + additionalProperties: true + 401: + description: 'Auth Error' + schema: + type: object + properties: + error: + type: string + 500: + description: 'Error' + schema: + type: object + properties: + error: + type: string + security: + - APIKeyHeader: [] + + /reExecute: + post: + description: '' + operationId: 'reExecute' + x-google-backend: + address: https://asia-northeast1-datacom-poc.cloudfunctions.net/generate_minutes/api/reExecute + path_translation: CONSTANT_ADDRESS + jwt_audience: https://asia-northeast1-datacom-poc.cloudfunctions.net/generate_minutes + deadline: 600 + consumes: + - application/json + produces: + - application/json + parameters: + - in: body + name: body + description: JSON payload + required: false + schema: + type: object + additionalProperties: true + responses: + 200: + description: 'OK' + schema: + type: object + additionalProperties: true + 401: + description: 'Auth Error' + schema: + type: object + properties: + error: + type: string + 500: + description: 'Error' + schema: + type: object + properties: + error: + type: string + security: + - APIKeyHeader: [] + + /test: + post: + description: 'test' + operationId: 'test' + x-google-backend: + address: https://asia-northeast1-datacom-poc.cloudfunctions.net/generate_minutes/api/test + path_translation: CONSTANT_ADDRESS + jwt_audience: https://asia-northeast1-datacom-poc.cloudfunctions.net/generate_minutes + deadline: 600 consumes: - application/json produces: @@ -49,10 +227,7 @@ paths: - APIKeyHeader: [] options: summary: 'CORS support' - operationId: 'trigger-minutes-workflow-from-miitel-options' - x-google-backend: - address: https://asia-northeast1-datacom-poc.cloudfunctions.net/mrttrigger-minutes-workflow-from-miitel - path_translation: CONSTANT_ADDRESS + operationId: 'test-options' responses: 204: description: 'CORS preflight' diff --git a/functions/generate_minutes/.gcloudignore b/functions/generate_minutes/.gcloudignore new file mode 100644 
index 0000000..8fbd543 --- /dev/null +++ b/functions/generate_minutes/.gcloudignore @@ -0,0 +1,24 @@ +# This file specifies files that are *not* uploaded to Google Cloud +# using gcloud. It follows the same syntax as .gitignore, with the addition of +# "#!include" directives (which insert the entries of the given .gitignore-style +# file at that point). +# +# For more information, run: +# $ gcloud topic gcloudignore +# +.gcloudignore +# If you would like to upload your .git directory, .gitignore file or files +# from your .gitignore file, remove the corresponding line +# below: +.git +.gitignore + +node_modules + +.env_dev +.env_prod + +deploy_function_dev.sh + +package-lock.json + diff --git a/functions/generate_minutes/deploy_function_dev.sh b/functions/generate_minutes/deploy_function_dev.sh new file mode 100644 index 0000000..fc372b2 --- /dev/null +++ b/functions/generate_minutes/deploy_function_dev.sh @@ -0,0 +1,32 @@ +#!/bin/bash + +# プロジェクトIDを設定 +PROJECT_ID="datacom-poc" + +# デプロイする関数名 +FUNCTION_NAME="generate_minutes" + +# 関数のエントリポイント +ENTRY_POINT="helloHttp" + +# ランタイム +RUNTIME="nodejs22" + +# リージョン +REGION="asia-northeast1" + +# 環境変数ファイル +ENV_VARS_FILE=".env_dev" + +gcloud auth application-default set-quota-project $PROJECT_ID +gcloud config set project $PROJECT_ID + +gcloud functions deploy $FUNCTION_NAME \ + --gen2 \ + --region $REGION \ + --runtime $RUNTIME \ + --source=. \ + --entry-point $ENTRY_POINT \ + --env-vars-file $ENV_VARS_FILE \ + --trigger-http \ + --timeout 600s \ diff --git a/functions/generate_minutes/src/apiRouter.ts b/functions/generate_minutes/src/apiRouter.ts index 6077066..03efbaf 100644 --- a/functions/generate_minutes/src/apiRouter.ts +++ b/functions/generate_minutes/src/apiRouter.ts @@ -6,6 +6,7 @@ import { hubspotController } from "./logics/hubspot"; import { createCustomError, responseError } from "./logics/error"; import { CLOUD_STORAGE_LOG_FOLDER_NAME, CLOUD_STORAGE_MASTER_FOLDER_NAME, COMPANIES_FILE_NAME, OWNERS_FILE_NAME } from "../serverConfig"; import { Delay } from "cerceis-lib"; +import { googleDriveController } from "./logics/googleDrive"; const router = express.Router(); @@ -13,6 +14,10 @@ const router = express.Router(); router.post("/miitel", async (req, res) => { try { const body = req.body; + + if('challenge' in body) return res.status(200).contentType('text/plain').send(body.challenge); + + const parsedBody = MiiTelWebhookSchema.safeParse(body); if (!parsedBody.success) throw createCustomError("ZOD_FAILED"); @@ -23,9 +28,10 @@ router.post("/miitel", async (req, res) => { await processRequest(videoInfo); // if(!result) throw - res.status(200).send("ok"); + return res.status(200).send("ok"); } catch(err) { - responseError(err, res) + responseError(err, res); + return; } }); @@ -118,22 +124,22 @@ router.post("/logUpload", async (req, res) => { // res.send('ok'); // }); -// router.post("/test", async (req, res) => { -// try { +router.post("/test", async (req, res) => { + try { -// const googleAuth = await googleDriveController.getAuth(); -// const driveClilent = googleDriveController.getDriveClient(googleAuth); -// const sheetsClient = googleDriveController.getSheetsClient(googleAuth); -// const folderId = await googleDriveController.searchFileIdByFileName(driveClilent, MINUTES_CREATION_HISTORY_FOLDER_ID, '2025'); -// if(!folderId) throw new Error() -// // console.log(fileId); -// // const sheetId = await googleDriveController.getLogSheetId(driveClilent, sheetsClient, folderId, 'test1'); -// // console.log('sheet id : ', sheetId); -// 
res.send("ok"); -// } catch (error) { -// console.error("Error in /test endpoint:", error); -// res.status(500).send("Error in /test endpoint"); -// } -// }); + // const googleAuth = await googleDriveController.getAuth(); + // const driveClilent = googleDriveController.getDriveClient(googleAuth); + // const sheetsClient = googleDriveController.getSheetsClient(googleAuth); + // const folderId = await googleDriveController.searchFileIdByFileName(driveClilent, MINUTES_CREATION_HISTORY_FOLDER_ID, '2025'); + // if(!folderId) throw new Error() + // console.log(fileId); + // const sheetId = await googleDriveController.getLogSheetId(driveClilent, sheetsClient, folderId, 'test1'); + // console.log('sheet id : ', sheetId); + res.send("ok"); + } catch (error) { + console.error("Error in /test endpoint:", error); + res.status(500).send("Error in /test endpoint"); + } +}); export default router; \ No newline at end of file From 6454e1b46befd28965ed36bb6248443ff1a8dbbd Mon Sep 17 00:00:00 2001 From: kosukesuenaga Date: Wed, 24 Dec 2025 11:36:34 +0900 Subject: [PATCH 6/6] 20251224 --- .gitignore | 23 +- _test/test_dev.sh | 16 - api-gateway/openapi.yaml | 319 +++++++++++++++++- cloudbuild_dev.yaml | 196 ----------- cloudbuild_prod.yaml | 193 ----------- functions/generate_minutes/.gcloudignore | 3 + functions/generate_minutes/package.json | 5 +- functions/generate_minutes/serverConfig.ts | 4 +- functions/generate_minutes/src/apiRouter.ts | 117 ++++--- functions/generate_minutes/src/logics/ai.ts | 95 +++++- functions/generate_minutes/src/logics/file.ts | 5 +- .../generate_minutes/src/logics/fuzzyMatch.ts | 8 +- .../src/logics/googleDrive.ts | 104 ++++-- .../generate_minutes/src/logics/hubspot.ts | 22 +- .../generate_minutes/src/logics/process.ts | 53 ++- .../generate_minutes/src/logics/storage.ts | 21 +- .../generate_minutes/src/stores/errorCodes.ts | 33 +- terraform/prod/{initial => IAM}/main.tf | 32 +- terraform/prod/scheduler/main.tf | 29 +- 19 files changed, 667 insertions(+), 611 deletions(-) delete mode 100755 _test/test_dev.sh delete mode 100755 cloudbuild_dev.yaml delete mode 100755 cloudbuild_prod.yaml rename terraform/prod/{initial => IAM}/main.tf (60%) diff --git a/.gitignore b/.gitignore index cce9094..65534ab 100755 --- a/.gitignore +++ b/.gitignore @@ -1,23 +1,10 @@ -handle-company-webhook/ - terraform.* .terraform* -IAM/ - -test/ - -venv/ -__pycache__/ -*.csv - -request.json - node_modules/ dist/ -.env_dev -.env -.env_prod -credentials.json -credentials_dev.json -package-lock.json \ No newline at end of file +.env* +credentials* +package-lock.json +*.sh +log/ \ No newline at end of file diff --git a/_test/test_dev.sh b/_test/test_dev.sh deleted file mode 100755 index e17dc07..0000000 --- a/_test/test_dev.sh +++ /dev/null @@ -1,16 +0,0 @@ -#!/bin/bash - -# APIエンドポイントURL -API_URL="https://sales-tool-gw-dev-ex1cujb.an.gateway.dev/trigger-minutes-workflow-from-miitel" - -# APIキー(ヘッダーに付与する場合) -API_KEY="AIzaSyBVJOtvJTB4noAfUGEyMhCRqsF5yfypENc" - -# リクエストボディ -JSON_FILE="request.json" - -# curlコマンド実行 -curl -X POST "$API_URL" \ - -H "Content-Type: application/json" \ - -H "x-api-key: $API_KEY" \ - -d @"$JSON_FILE" diff --git a/api-gateway/openapi.yaml b/api-gateway/openapi.yaml index c056fc8..2060434 100755 --- a/api-gateway/openapi.yaml +++ b/api-gateway/openapi.yaml @@ -5,10 +5,6 @@ info: version: '1.0.0' schemes: - 'https' -host: 'crate-minutes-gw-a8slsa47.an.gateway.dev' -x-google-endpoints: - - name: 'crate-minutes-gw-a8slsa47.an.gateway.dev' - allowCors: True paths: /create-minutes: post: @@ 
-70,6 +66,321 @@ paths: Access-Control-Allow-Headers: type: string default: 'Content-Type, x-api-key' + + /miitel: + post: + description: 'Miitel Webhook Processer' + operationId: 'miitel' + x-google-backend: + address: https://asia-northeast1-rational-timing-443808-u0.cloudfunctions.net/generate_minutes/api/miitel + path_translation: CONSTANT_ADDRESS + jwt_audience: https://asia-northeast1-rational-timing-443808-u0.cloudfunctions.net/generate_minutes + deadline: 600 + consumes: + - application/json + produces: + - application/json + parameters: + - in: body + name: body + description: JSON payload + required: false + schema: + type: object + additionalProperties: true + responses: + 200: + description: 'OK' + schema: + type: object + additionalProperties: true + 401: + description: 'Auth Error' + schema: + type: object + properties: + error: + type: string + 400: + description: 'Error' + schema: + type: object + properties: + error: + type: string + 500: + description: 'Error' + schema: + type: object + properties: + error: + type: string + security: + - APIKeyHeader: [] + + /dailyBatch: + post: + description: 'get companies and owners' + operationId: 'dailyBatch' + x-google-backend: + address: https://asia-northeast1-rational-timing-443808-u0.cloudfunctions.net/generate_minutes/api/dailyBatch + path_translation: CONSTANT_ADDRESS + jwt_audience: https://asia-northeast1-rational-timing-443808-u0.cloudfunctions.net/generate_minutes + deadline: 600 + consumes: + - application/json + produces: + - application/json + parameters: + - in: body + name: body + description: JSON payload + required: false + schema: + type: object + additionalProperties: true + responses: + 200: + description: 'OK' + schema: + type: object + additionalProperties: true + 401: + description: 'Auth Error' + schema: + type: object + properties: + error: + type: string + 400: + description: 'Error' + schema: + type: object + properties: + error: + type: string + 500: + description: 'Error' + schema: + type: object + properties: + error: + type: string + security: + - APIKeyHeader: [] + + /getLog: + post: + description: 'get log' + operationId: 'getLog' + x-google-backend: + address: https://asia-northeast1-rational-timing-443808-u0.cloudfunctions.net/generate_minutes/api/getLog + path_translation: CONSTANT_ADDRESS + jwt_audience: https://asia-northeast1-rational-timing-443808-u0.cloudfunctions.net/generate_minutes + deadline: 600 + consumes: + - application/json + produces: + - application/json + parameters: + - in: body + name: body + description: JSON payload + required: false + schema: + type: object + additionalProperties: true + responses: + 200: + description: 'OK' + schema: + type: object + additionalProperties: true + 401: + description: 'Auth Error' + schema: + type: object + properties: + error: + type: string + 400: + description: 'Error' + schema: + type: object + properties: + error: + type: string + 500: + description: 'Error' + schema: + type: object + properties: + error: + type: string + security: + - APIKeyHeader: [] + + /reExecute: + post: + description: '' + operationId: 'reExecute' + x-google-backend: + address: https://asia-northeast1-rational-timing-443808-u0.cloudfunctions.net/generate_minutes/api/reExecute + path_translation: CONSTANT_ADDRESS + jwt_audience: https://asia-northeast1-rational-timing-443808-u0.cloudfunctions.net/generate_minutes + deadline: 600 + consumes: + - application/json + produces: + - application/json + parameters: + - in: body + name: body + description: JSON payload + 
required: false + schema: + type: object + additionalProperties: true + responses: + 200: + description: 'OK' + schema: + type: object + additionalProperties: true + 401: + description: 'Auth Error' + schema: + type: object + properties: + error: + type: string + 400: + description: 'Error' + schema: + type: object + properties: + error: + type: string + 500: + description: 'Error' + schema: + type: object + properties: + error: + type: string + security: + - APIKeyHeader: [] + + /test: + post: + description: 'test' + operationId: 'test' + x-google-backend: + address: https://asia-northeast1-rational-timing-443808-u0.cloudfunctions.net/generate_minutes/api/test + path_translation: CONSTANT_ADDRESS + jwt_audience: https://asia-northeast1-rational-timing-443808-u0.cloudfunctions.net/generate_minutes + deadline: 600 + consumes: + - application/json + produces: + - application/json + parameters: + - in: body + name: body + description: JSON payload + required: false + schema: + type: object + additionalProperties: true + responses: + 200: + description: 'OK' + schema: + type: object + additionalProperties: true + 401: + description: 'Auth Error' + schema: + type: object + properties: + error: + type: string + 400: + description: 'Error' + schema: + type: object + properties: + error: + type: string + 500: + description: 'Error' + schema: + type: object + properties: + error: + type: string + + /alertTest: + post: + description: 'alertTest' + operationId: 'alertTest' + x-google-backend: + address: https://asia-northeast1-rational-timing-443808-u0.cloudfunctions.net/generate_minutes/api/alertTest + path_translation: CONSTANT_ADDRESS + jwt_audience: https://asia-northeast1-rational-timing-443808-u0.cloudfunctions.net/generate_minutes + deadline: 600 + consumes: + - application/json + produces: + - application/json + parameters: + - in: body + name: body + description: JSON payload + required: false + schema: + type: object + additionalProperties: true + responses: + 200: + description: 'OK' + schema: + type: object + additionalProperties: true + 401: + description: 'Auth Error' + schema: + type: object + properties: + error: + type: string + 500: + description: 'Error' + schema: + type: object + properties: + error: + type: string + + security: + - APIKeyHeader: [] + options: + summary: 'CORS support' + operationId: 'test-options' + responses: + 204: + description: 'CORS preflight' + headers: + Access-Control-Allow-Origin: + type: string + default: '*' + Access-Control-Allow-Methods: + type: string + default: 'GET, POST, OPTIONS' + Access-Control-Allow-Headers: + type: string + default: 'Content-Type, x-api-key' + securityDefinitions: APIKeyHeader: type: apiKey diff --git a/cloudbuild_dev.yaml b/cloudbuild_dev.yaml deleted file mode 100755 index 757cb07..0000000 --- a/cloudbuild_dev.yaml +++ /dev/null @@ -1,196 +0,0 @@ -substitutions: - _ENV: 'dev' - _CF_SERVICE_ACCOUNT: 'mrt-cloudfunctions-sa-devtest' - _CW_SERVICE_ACCOUNT: 'mrt-cloudworkflows-sa-devtest' - -options: - logging: CLOUD_LOGGING_ONLY - -steps: - # 会社一覧取得 - - id: 'gcloud functions deploy mrt-export-companies-to-gcs' - name: gcr.io/cloud-builders/gcloud - dir: 'functions/export-companies-to-gcs' - args: [ - 'functions', - 'deploy', - 'mrt-export-companies-to-gcs', - '--gen2', - '--runtime=python312', - '--region=asia-northeast1', - '--source=./source', # dir で切り替えているので「.」 - '--entry-point=handle_request', # 変更する場合はここ - '--trigger-http', - '--allow-unauthenticated', - 
'--service-account=$_CF_SERVICE_ACCOUNT@$PROJECT_ID.iam.gserviceaccount.com', - '--env-vars-file=.env_dev', - '--project=$PROJECT_ID', - '--quiet', - ] - waitFor: ['-'] - - # 担当者一覧取得 - - name: gcr.io/cloud-builders/gcloud - dir: 'functions/export-owners-to-gcs' - args: [ - 'functions', - 'deploy', - 'mrt-export-owners-to-gcs', - '--gen2', - '--runtime=python312', - '--region=asia-northeast1', - '--source=./source', # dir で切り替えているので「.」 - '--entry-point=handle_request', # 変更する場合はここ - '--trigger-http', - '--no-allow-unauthenticated', - '--service-account=$_CF_SERVICE_ACCOUNT@$PROJECT_ID.iam.gserviceaccount.com', - '--env-vars-file=.env_dev', - '--project=$PROJECT_ID', - '--quiet', - ] - waitFor: ['-'] - - # スプレッドシート作成 - - name: gcr.io/cloud-builders/gcloud - dir: 'functions/create-log-sheet' - args: [ - 'functions', - 'deploy', - 'mrt-create-log-sheet', - '--gen2', - '--runtime=python312', - '--region=asia-northeast1', - '--source=./source', # dir で切り替えているので「.」 - '--entry-point=handle_request', # 変更する場合はここ - '--trigger-http', - '--no-allow-unauthenticated', - '--service-account=$_CF_SERVICE_ACCOUNT@$PROJECT_ID.iam.gserviceaccount.com', - '--env-vars-file=.env_dev', - '--project=$PROJECT_ID', - '--quiet', - ] - waitFor: ['-'] - - # ワークフロー呼び出し関数 - - name: gcr.io/cloud-builders/gcloud - dir: 'functions/trigger-minutes-workflow-from-miitel' - args: [ - 'functions', - 'deploy', - 'mrt-trigger-minutes-workflow-from-miitel', - '--gen2', - '--runtime=python312', - '--region=asia-northeast1', - '--source=./source', # dir で切り替えているので「.」 - '--entry-point=handle_request', # 変更する場合はここ - '--trigger-http', - '--no-allow-unauthenticated', - '--service-account=$_CF_SERVICE_ACCOUNT@$PROJECT_ID.iam.gserviceaccount.com', - '--env-vars-file=.env_dev', - '--project=$PROJECT_ID', - '--quiet', - ] - waitFor: ['-'] - - # 議事録作成関数 - - name: gcr.io/cloud-builders/gcloud - dir: 'functions/generate-meeting-minutes' - args: [ - 'functions', - 'deploy', - 'mrt-generate-meeting-minutes', - '--gen2', - '--runtime=python312', - '--region=asia-northeast1', - '--source=./source', # dir で切り替えているので「.」 - '--entry-point=handle_request', # 変更する場合はここ - '--trigger-http', - '--cpu=0.5', - '--memory=1Gi', - '--no-allow-unauthenticated', - '--service-account=$_CF_SERVICE_ACCOUNT@$PROJECT_ID.iam.gserviceaccount.com', - '--env-vars-file=.env_dev', - '--project=$PROJECT_ID', - '--timeout=10m', - '--quiet', - ] - waitFor: ['-'] - - # 議事録をドライブへアップロードする関数 - - name: gcr.io/cloud-builders/gcloud - dir: 'functions/upload-minutes-to-drive' - args: [ - 'functions', - 'deploy', - 'mrt-upload-minutes-to-drive', - '--gen2', - '--runtime=python312', - '--region=asia-northeast1', - '--source=./source', # dir で切り替えているので「.」 - '--entry-point=handle_request', # 変更する場合はここ - '--trigger-http', - '--cpu=0.5', - '--memory=1Gi', - '--no-allow-unauthenticated', - '--service-account=$_CF_SERVICE_ACCOUNT@$PROJECT_ID.iam.gserviceaccount.com', - '--env-vars-file=.env_dev', - '--project=$PROJECT_ID', - '--quiet', - ] - waitFor: ['-'] - - # Hubspot連携関数 - - name: gcr.io/cloud-builders/gcloud - dir: 'functions/create-hubspot-meeting-log' - args: [ - 'functions', - 'deploy', - 'mrt-create-hubspot-meeting-log', - '--gen2', - '--runtime=python312', - '--region=asia-northeast1', - '--source=./source', # dir で切り替えているので「.」 - '--entry-point=handle_request', # 変更する場合はここ - '--trigger-http', - '--no-allow-unauthenticated', - '--service-account=$_CF_SERVICE_ACCOUNT@$PROJECT_ID.iam.gserviceaccount.com', - '--env-vars-file=.env_dev', - '--project=$PROJECT_ID', - '--quiet', - ] - 
waitFor: ['-'] - - # スプレッドシートへ記録 - - name: gcr.io/cloud-builders/gcloud - dir: 'functions/append-log-to-sheet' - args: [ - 'functions', - 'deploy', - 'mrt-append-log-to-sheet', - '--gen2', - '--runtime=python312', - '--region=asia-northeast1', - '--source=./source', # dir で切り替えているので「.」 - '--entry-point=handle_request', # 変更する場合はここ - '--trigger-http', - '--no-allow-unauthenticated', - '--service-account=$_CF_SERVICE_ACCOUNT@$PROJECT_ID.iam.gserviceaccount.com', - '--env-vars-file=.env_dev', - '--project=$PROJECT_ID', - '--quiet', - ] - waitFor: ['-'] - - # ワークフロー - - name: gcr.io/cloud-builders/gcloud - dir: 'workflows/workflow-create-minutes' - args: - [ - 'workflows', - 'deploy', - 'mrt-workflow-create-minutes', - '--location=asia-northeast1', - '--source=main.yaml', - '--service-account=$_CW_SERVICE_ACCOUNT@$PROJECT_ID.iam.gserviceaccount.com', - '--quiet', - ] diff --git a/cloudbuild_prod.yaml b/cloudbuild_prod.yaml deleted file mode 100755 index 87f56d4..0000000 --- a/cloudbuild_prod.yaml +++ /dev/null @@ -1,193 +0,0 @@ -substitutions: - _ENV: 'prod' - _CF_SERVICE_ACCOUNT: 'mrt-cloudfunctions-sa' - _CW_SERVICE_ACCOUNT: 'mrt-cloudworkflows-sa' - -steps: - # 会社一覧取得 - - id: 'gcloud functions deploy mrt-export-companies-to-gcs' - name: gcr.io/cloud-builders/gcloud - dir: 'functions/export-companies-to-gcs' - args: [ - 'functions', - 'deploy', - 'mrt-export-companies-to-gcs', - '--gen2', - '--runtime=python312', - '--region=asia-northeast1', - '--source=./source', # dir で切り替えているので「.」 - '--entry-point=handle_request', # 変更する場合はここ - '--trigger-http', - '--allow-unauthenticated', - '--service-account=$_CF_SERVICE_ACCOUNT@$PROJECT_ID.iam.gserviceaccount.com', - '--env-vars-file=.env_prod', - '--project=$PROJECT_ID', - '--quiet', - ] - waitFor: ['-'] - - # 担当者一覧取得 - - name: gcr.io/cloud-builders/gcloud - dir: 'functions/export-owners-to-gcs' - args: [ - 'functions', - 'deploy', - 'mrt-export-owners-to-gcs', - '--gen2', - '--runtime=python312', - '--region=asia-northeast1', - '--source=./source', # dir で切り替えているので「.」 - '--entry-point=handle_request', # 変更する場合はここ - '--trigger-http', - '--no-allow-unauthenticated', - '--service-account=$_CF_SERVICE_ACCOUNT@$PROJECT_ID.iam.gserviceaccount.com', - '--env-vars-file=.env_prod', - '--project=$PROJECT_ID', - '--quiet', - ] - waitFor: ['-'] - - # スプレッドシート作成 - - name: gcr.io/cloud-builders/gcloud - dir: 'functions/create-log-sheet' - args: [ - 'functions', - 'deploy', - 'mrt-create-log-sheet', - '--gen2', - '--runtime=python312', - '--region=asia-northeast1', - '--source=./source', # dir で切り替えているので「.」 - '--entry-point=handle_request', # 変更する場合はここ - '--trigger-http', - '--no-allow-unauthenticated', - '--service-account=$_CF_SERVICE_ACCOUNT@$PROJECT_ID.iam.gserviceaccount.com', - '--env-vars-file=.env_prod', - '--project=$PROJECT_ID', - '--quiet', - ] - waitFor: ['-'] - - # ワークフロー呼び出し関数 - - name: gcr.io/cloud-builders/gcloud - dir: 'functions/trigger-minutes-workflow-from-miitel' - args: [ - 'functions', - 'deploy', - 'mrt-trigger-minutes-workflow-from-miitel', - '--gen2', - '--runtime=python312', - '--region=asia-northeast1', - '--source=./source', # dir で切り替えているので「.」 - '--entry-point=handle_request', # 変更する場合はここ - '--trigger-http', - '--no-allow-unauthenticated', - '--service-account=$_CF_SERVICE_ACCOUNT@$PROJECT_ID.iam.gserviceaccount.com', - '--env-vars-file=.env_prod', - '--project=$PROJECT_ID', - '--quiet', - ] - waitFor: ['-'] - - # 議事録作成関数 - - name: gcr.io/cloud-builders/gcloud - dir: 'functions/generate-meeting-minutes' - args: [ - 'functions', - 
'deploy', - 'mrt-generate-meeting-minutes', - '--gen2', - '--runtime=python312', - '--region=asia-northeast1', - '--source=./source', # dir で切り替えているので「.」 - '--entry-point=handle_request', # 変更する場合はここ - '--trigger-http', - '--cpu=0.5', - '--memory=1Gi', - '--no-allow-unauthenticated', - '--service-account=$_CF_SERVICE_ACCOUNT@$PROJECT_ID.iam.gserviceaccount.com', - '--env-vars-file=.env_prod', - '--project=$PROJECT_ID', - '--timeout=10m', - '--quiet', - ] - waitFor: ['-'] - - # 議事録をドライブへアップロードする関数 - - name: gcr.io/cloud-builders/gcloud - dir: 'functions/upload-minutes-to-drive' - args: [ - 'functions', - 'deploy', - 'mrt-upload-minutes-to-drive', - '--gen2', - '--runtime=python312', - '--region=asia-northeast1', - '--source=./source', # dir で切り替えているので「.」 - '--entry-point=handle_request', # 変更する場合はここ - '--trigger-http', - '--cpu=0.5', - '--memory=1Gi', - '--no-allow-unauthenticated', - '--service-account=$_CF_SERVICE_ACCOUNT@$PROJECT_ID.iam.gserviceaccount.com', - '--env-vars-file=.env_prod', - '--project=$PROJECT_ID', - '--quiet', - ] - waitFor: ['-'] - - # Hubspot連携関数 - - name: gcr.io/cloud-builders/gcloud - dir: 'functions/create-hubspot-meeting-log' - args: [ - 'functions', - 'deploy', - 'mrt-create-hubspot-meeting-log', - '--gen2', - '--runtime=python312', - '--region=asia-northeast1', - '--source=./source', # dir で切り替えているので「.」 - '--entry-point=handle_request', # 変更する場合はここ - '--trigger-http', - '--no-allow-unauthenticated', - '--service-account=$_CF_SERVICE_ACCOUNT@$PROJECT_ID.iam.gserviceaccount.com', - '--env-vars-file=.env_prod', - '--project=$PROJECT_ID', - '--quiet', - ] - waitFor: ['-'] - - # スプレッドシートへ記録 - - name: gcr.io/cloud-builders/gcloud - dir: 'functions/append-log-to-sheet' - args: [ - 'functions', - 'deploy', - 'mrt-append-log-to-sheet', - '--gen2', - '--runtime=python312', - '--region=asia-northeast1', - '--source=./source', # dir で切り替えているので「.」 - '--entry-point=handle_request', # 変更する場合はここ - '--trigger-http', - '--no-allow-unauthenticated', - '--service-account=$_CF_SERVICE_ACCOUNT@$PROJECT_ID.iam.gserviceaccount.com', - '--env-vars-file=.env_prod', - '--project=$PROJECT_ID', - '--quiet', - ] - waitFor: ['-'] - - # ワークフロー - - name: gcr.io/cloud-builders/gcloud - dir: 'workflows/workflow-create-minutes' - args: - [ - 'workflows', - 'deploy', - 'mrt-workflow-create-minutes', - '--location=asia-northeast1', - '--source=main.yaml', - '--service-account=$_CW_SERVICE_ACCOUNT@$PROJECT_ID.iam.gserviceaccount.com', - '--quiet', - ] diff --git a/functions/generate_minutes/.gcloudignore b/functions/generate_minutes/.gcloudignore index 8fbd543..e773e5a 100644 --- a/functions/generate_minutes/.gcloudignore +++ b/functions/generate_minutes/.gcloudignore @@ -19,6 +19,9 @@ node_modules .env_prod deploy_function_dev.sh +deploy_function_prod.sh + +files/ package-lock.json diff --git a/functions/generate_minutes/package.json b/functions/generate_minutes/package.json index beeb1e8..dcae153 100644 --- a/functions/generate_minutes/package.json +++ b/functions/generate_minutes/package.json @@ -5,8 +5,8 @@ "scripts": { "build": "tsc", "start": "npm run build && functions-framework --target=helloHttp --port=8080 --source=dist/index.js", - "debug": "dotenv -e .env_dev -- node --inspect node_modules/.bin/functions-framework --source=dist/index.js --target=helloHttp", - "watch": "concurrently \"dotenv -e .env_dev -- npm run build -- --watch\" \"dotenv -e .env_dev -- nodemon --watch ./dist/ --exec npm run debug\"" + "debug": "dotenv -e .env_prod -- node --inspect node_modules/.bin/functions-framework 
--source=dist/index.js --target=helloHttp", + "watch": "concurrently \"dotenv -e .env_prod -- npm run build -- --watch\" \"dotenv -e .env_prod -- nodemon --watch ./dist/ --exec npm run debug\"" }, "devDependencies": { "@google-cloud/functions-framework": "^3.0.0", @@ -29,6 +29,7 @@ "express": "^4.21.2", "fast-fuzzy": "^1.12.0", "googleapis": "^105.0.0", + "marked": "^17.0.1", "zod": "^4.1.13" } } diff --git a/functions/generate_minutes/serverConfig.ts b/functions/generate_minutes/serverConfig.ts index f547d2f..2509d54 100644 --- a/functions/generate_minutes/serverConfig.ts +++ b/functions/generate_minutes/serverConfig.ts @@ -1,6 +1,6 @@ -export const GEMINI_MODEL_ID = "gemini-2.5-flash"; -export const DEBUG = true; +export const GEMINI_MODEL_ID = "gemini-2.5-pro"; +export const DEBUG = false; export const CLOUD_STORAGE_MASTER_FOLDER_NAME = "master"; export const CLOUD_STORAGE_LOG_FOLDER_NAME = "new_request_log"; diff --git a/functions/generate_minutes/src/apiRouter.ts b/functions/generate_minutes/src/apiRouter.ts index 03efbaf..60a190d 100644 --- a/functions/generate_minutes/src/apiRouter.ts +++ b/functions/generate_minutes/src/apiRouter.ts @@ -1,12 +1,15 @@ import express from "express"; import zlib from "zlib"; import { storageController } from "./logics/storage"; -import { MiiTelWebhookSchema, processRequest } from "./logics/process"; +import { logUploadProcess, MiiTelWebhookSchema, processRequest, testProcess } from "./logics/process"; import { hubspotController } from "./logics/hubspot"; import { createCustomError, responseError } from "./logics/error"; import { CLOUD_STORAGE_LOG_FOLDER_NAME, CLOUD_STORAGE_MASTER_FOLDER_NAME, COMPANIES_FILE_NAME, OWNERS_FILE_NAME } from "../serverConfig"; import { Delay } from "cerceis-lib"; -import { googleDriveController } from "./logics/googleDrive"; +import path from "path"; +import fs from "fs"; +import { fuzzyMatchController } from "./logics/fuzzyMatch"; + const router = express.Router(); @@ -30,8 +33,7 @@ router.post("/miitel", async (req, res) => { return res.status(200).send("ok"); } catch(err) { - responseError(err, res); - return; + return responseError(err, res); } }); @@ -41,18 +43,19 @@ router.post("/dailyBatch", async (req, res) => { console.log("Starting daily batch process..."); // export companies to GCS const companies = await hubspotController.getCompanies(); - if(!companies) throw createCustomError("GET_OWNERS_FAILED"); + if(!companies) throw createCustomError("GET_COMPANIES_FAILED"); await storageController.saveToGCS(CLOUD_STORAGE_MASTER_FOLDER_NAME, COMPANIES_FILE_NAME, JSON.stringify(companies), 'application/json'); // export owners to GCS const owners = await hubspotController.getOwners(); - if(!owners) throw createCustomError("GET_COMPANIES_FAILED"); + if(!owners) throw createCustomError("GET_OWNERS_FAILED"); await storageController.saveToGCS(CLOUD_STORAGE_MASTER_FOLDER_NAME, OWNERS_FILE_NAME, JSON.stringify(owners), 'application/json'); res.status(200).send("Daily batch executed."); } catch (error) { console.error("Error in daily batch:", error); + return res.status(400).send("Error executing daily batch."); } }); @@ -60,11 +63,13 @@ router.post("/dailyBatch", async (req, res) => { router.post("/getLog", async (req, res) => { console.log(req.body); const meetingId = req.body.meetingId; - const exist = await storageController.existsInGCS("request_log", "test.json.gz"); + const exist = await storageController.existsInGCS(CLOUD_STORAGE_LOG_FOLDER_NAME, `${meetingId}.json.gz`); console.log("Log exists:", exist); - const log = 
await storageController.loadFromGCS("request_log", meetingId + ".json.gz"); - console.log(log) - res.send(log); + const log = await storageController.loadFromGCS(CLOUD_STORAGE_LOG_FOLDER_NAME, meetingId + ".json.gz"); + if(!log) throw Error(); + const params = MiiTelWebhookSchema.parse(JSON.parse(log)); + // console.log(params) + res.send(params); }); @@ -77,11 +82,12 @@ router.post("/reExecute", async (req, res) => { const log = await storageController.loadFromGCS(CLOUD_STORAGE_LOG_FOLDER_NAME, `${meetingId}.json.gz`); if(!log) throw Error(); const params = MiiTelWebhookSchema.safeParse(JSON.parse(log)); + console.log(params); if(!params.success) throw createCustomError("ZOD_FAILED"); params.data.video.title = newTitle; // console.log(params.data.video) - await processRequest(params.data.video); + // await processRequest(params.data.video); res.send(log); } catch(error) { @@ -92,28 +98,39 @@ router.post("/reExecute", async (req, res) => { // 過去のログを全てGoogle Driveへアップロード -router.post("/logUpload", async (req, res) => { - try { - const list = await storageController.getFileList(); - if(!list) throw createCustomError("GET_FILES_FAILED"); - for(const l of list){ - console.log(l); - const fileName = l.split('/')[1] - const log = await storageController.loadFromGCS('request_log', fileName); - if(!log) throw createCustomError("GET_FILES_FAILED"); - // console.log(log); - const parsedLog = MiiTelWebhookSchema.safeParse(JSON.parse(log)); - if(!parsedLog.success) throw createCustomError("ZOD_FAILED"); - console.log(parsedLog.data.video.title); - - await Delay(500); - } - res.send('ok'); - } catch(error) { - console.log(error); - res.status(400).send("Failed"); - } -}); +// router.post("/logUpload", async (req, res) => { +// try { +// const list = await storageController.getFileList(); +// if(!list) throw createCustomError("GET_FILES_FAILED"); +// console.log("Total files to process:", list.length); +// const failedFiles: string[] = []; +// let count = 0; +// const tmplist = list.slice(1600,1800); +// for(const l of tmplist){ +// console.log(l); +// count++; +// console.log(`Processing file ${count} of ${tmplist.length}`); +// const fileName = l.split('/')[1] +// const log = await storageController.loadFromGCS('request_log', fileName); +// if(!log) { +// failedFiles.push(fileName); +// continue; +// }; +// const parsedLog = MiiTelWebhookSchema.safeParse(JSON.parse(log)); +// if(!parsedLog.success) throw createCustomError("ZOD_FAILED"); +// console.log(parsedLog.data.video.title); +// const result = await logUploadProcess(parsedLog.data.video); +// if(!result) failedFiles.push(fileName); +// await Delay(500); +// } +// const outputPath = path.join(__dirname, "../log/", 'failedFiles.json'); +// fs.writeFileSync(outputPath, JSON.stringify(failedFiles, null, 2)); +// res.send('ok'); +// } catch(error) { +// console.log(error); +// res.status(400).send("Failed"); +// } +// }); // router.post("/deleteFile", async (req, res) => { // console.log(req.body); @@ -126,20 +143,32 @@ router.post("/logUpload", async (req, res) => { router.post("/test", async (req, res) => { try { - - // const googleAuth = await googleDriveController.getAuth(); - // const driveClilent = googleDriveController.getDriveClient(googleAuth); - // const sheetsClient = googleDriveController.getSheetsClient(googleAuth); - // const folderId = await googleDriveController.searchFileIdByFileName(driveClilent, MINUTES_CREATION_HISTORY_FOLDER_ID, '2025'); - // if(!folderId) throw new Error() - // console.log(fileId); - // const sheetId = await 
googleDriveController.getLogSheetId(driveClilent, sheetsClient, folderId, 'test1'); - // console.log('sheet id : ', sheetId); + await testProcess(); res.send("ok"); } catch (error) { - console.error("Error in /test endpoint:", error); - res.status(500).send("Error in /test endpoint"); + console.error(error); + res.status(400).send("Error in /test endpoint"); } }); + +router.post("/alertTest", async (_req, res) => { + res.status(500).send("Error"); +}); + +// router.post("/debug", async (req, res) => { +// try { +// const a = await fuzzyMatchController.searchMatchedCompany("Aコープ九"); +// console.log(a); +// res.send("ok"); +// } catch (error) { +// console.error(error); +// res.status(400).send("Error in /test endpoint"); +// } +// }); + + + + + export default router; \ No newline at end of file diff --git a/functions/generate_minutes/src/logics/ai.ts b/functions/generate_minutes/src/logics/ai.ts index 5ba41ad..c16f019 100644 --- a/functions/generate_minutes/src/logics/ai.ts +++ b/functions/generate_minutes/src/logics/ai.ts @@ -8,20 +8,91 @@ const aiClient = new GoogleGenAI({ export const aiController = { generateMinutes: async(text: string): Promise => { const prompt = ` - あなたは議事録作成のプロフェッショナルです。以下の「文字起こし結果」は営業マンが録音した商談の文字起こしです。以下の制約条件に従い、最高の商談報告の議事録を作成してください。 +あなたは、流通・小売・飲食業界向けにシステムを提供する「データコム株式会社」の優秀な営業アシスタントです。 +以下の[文字起こし結果]をもとに、関係者(社内および顧客・パートナー)に共有するための、正確で可読性の高い議事録を作成してください。 - 制約条件: - 1. 文字起こし結果にはAIによる書き起こしミスがある可能性を考慮してください。 - 2. 冒頭に主要な「決定事項」と「アクションアイテム」をまとめてください。 - 3. 議論のポイントを議題ごとに要約してください。 - 4. 見出しや箇条書きを用いて、情報が探しやすい構造で簡潔かつ明瞭に記述してください。 - 5. 要約は500文字以内に収めてください。 - 6. 箇条書き形式で簡潔にまとめてください。 - 7. マークダウン記法は使わず、各項目を「■」や「・」等を使って見やすくしてください。 +# 前提条件 +* **当社(データコム株式会社):** システム開発会社。店舗分析、DX、業務効率化システムなどを提案・提供する立場。 +* **相手:** + * パターンA(エンドユーザー): 小売業(スーパー等)や飲食業の経営層・現場担当者。現場の課題や予算について話す。 + * パターンB(パートナー): システム会社、代理店、POSメーカー等。協業、API連携、紹介案件について話す。 +* **入力データ:** 対面会議のスマホ録音が含まれるため、話者ラベル(Speaker A, B等)は不正確です。必ず「発言内容」から誰が話しているかを判断してください。 - 文字起こし結果: - ${text} - ` +# 重要:専門用語・表記ルール(辞書) +文字起こし結果に誤字や、以下の「読み」に近い表現があった場合、必ず「正しい表記」に修正・統一してください。 + +| カテゴリ | 正しい表記 | 読み・備考 | +| :--- | :--- | :--- | +| **社名** | データコム | でーたこむ(当社) | +| **製品・サービス** | ID-POS | あいでぃーぽす | +| | ArmBox | あーむぼっくす | +| | RV | あーるぶい | +| | MS-View | えむえすびゅー | +| | AWS | えーだぶりゅーえす | +| | CustomerJournal (CJ) | かすたまーじゃーなる / しーじぇー | +| | Tiramisu | てぃらみす(お菓子ではなくシステム名) | +| | TerraMap | てらまっぷ | +| | d@Journal | でぃーあっと | +| | d3 | でぃーすりー | +| | D-PLAN | でぃーぷらん | +| | PV | ぴーぶい | +| | FreshO2 | ふれっしゅおーつー | +| | Point View | ぽいんとびゅー | +| | Retail View | りてーるびゅー | +| **一般・業界用語** | RFP | あーるえふぴー(提案依頼書) | +| | ジャーナルデータ | じゃーなるでーた | +| | 帳票 | ちょうひょう | +| | DWH | でぃーだぶりゅーえっち | +| | POS | ぽす | +| | CUBIC | きゅーびっく | +| | NOCC | のっく | +| **人名(当社関係者)**| 新垣、曽田、瀧本、田邊、會田 | しんがき、そだ、たきもと、たなべ、あいた | +| | 永倉、早坂、松浦、松永 | ながくら、はやさか、まつうら、まつなが | + +# 思考・処理ステップ +1. **用語の補正:** 上記の辞書に基づき、製品名や人名の誤変換を脳内で修正する。(例:「てぃらみすが」→「Tiramisuが」) +2. **話者の特定:** + * 「システムの説明」「事例の紹介」「持ち帰って検討します(提案側として)」等の発言は「データコム(当社)」とみなす。 + * 「現場のオペレーション」「予算感」「現状のシステムの不満」等の発言は「相手先」とみなす。 +3. **会議タイプの判定:** + * 内容が導入検討・商談であれば「商談報告」モードで作成。 + * 内容が仕様調整・協業・定例であれば「打合せ報告」モードで作成。 +4. **要約と構成:** 単なる会話の羅列ではなく、ロジカルに構造化する。 + +# 出力フォーマット(マークダウン) + +## 会議概要 +* **会議タイプ:** (商談 / パートナー協議 / 定例 etc.) +* **相手先:** (文脈から推測できる会社名や属性。不明な場合は「顧客」) +* **参加者(推測):** (判別できた場合のみ記載。当社: 〇〇 / 相手: 〇〇) +* **要約:** (会議の全体像を300文字以内で簡潔に) + +## 決定事項・合意事項 +* (確定したアクション、合意した条件、次回の予定など) +* ... + +## ネクストアクション(ToDo) +| 担当 | タスク内容 | 期限・備考 | +| :--- | :--- | :--- | +| 当社 | ... | ... | +| 相手 | ... | ... 
| + +## 議題詳細とポイント +### (議題1のタイトル) +* **現状・課題:** (相手が抱えている悩み、現状のシステム構成など) +* **当社提案・回答:** (データコム側が提示した解決策、機能説明) +* **反応:** (相手の感触、懸念点) + +### (議題2のタイトル) +... + +## 懸念点・確認事項 +* (技術的なハードル、予算の壁、競合の存在など、リスク情報があれば記載) + +--- +[文字起こし結果]: +${text} + `; try { const response = await aiClient.models.generateContent({ diff --git a/functions/generate_minutes/src/logics/file.ts b/functions/generate_minutes/src/logics/file.ts index 42837f0..ae97481 100644 --- a/functions/generate_minutes/src/logics/file.ts +++ b/functions/generate_minutes/src/logics/file.ts @@ -5,9 +5,8 @@ import fs from "fs"; export const fileController = { - createMinutesFileName: (title: string, hostName: string, jstStartsAt: Date): string => { - const dateStr = dateController.getFormattedDate(jstStartsAt, "yyyy年MM月dd日"); - const fileName = `${dateStr} ${title} ${hostName}`; + createMinutesFileName: (title: string, hostName: string, meetingDateStr: string): string => { + const fileName = `${meetingDateStr} ${title.replace('/', '')} ${hostName}`; return fileName; }, extractCompanyNameFromTitle: (title: string) => { diff --git a/functions/generate_minutes/src/logics/fuzzyMatch.ts b/functions/generate_minutes/src/logics/fuzzyMatch.ts index 3d90584..fad7f7f 100644 --- a/functions/generate_minutes/src/logics/fuzzyMatch.ts +++ b/functions/generate_minutes/src/logics/fuzzyMatch.ts @@ -15,13 +15,13 @@ export const fuzzyMatchController = { if(!parsedCompanies.success) return null; const normalizedCompanyName = fuzzyMatchController.normalizeCompanyName(companyName); - const normalizedCompanies: Company[] = parsedCompanies.data.map((c) => CompanySchema.parse({ + const companies: Company[] = parsedCompanies.data.map((c) => CompanySchema.parse({ id: c.id, - name: fuzzyMatchController.normalizeCompanyName(c.name), + name: c.name, })); // Exact Match - const exactMatchedCompany = fuzzyMatchController.searchExactMatchedCompany(normalizedCompanyName, normalizedCompanies); + const exactMatchedCompany = fuzzyMatchController.searchExactMatchedCompany(normalizedCompanyName, companies); // console.log(exactMatchedCompanyId); if(exactMatchedCompany) return exactMatchedCompany; @@ -56,7 +56,7 @@ export const fuzzyMatchController = { }, searchExactMatchedCompany: (companyName: string, companies: Company[]): Company | null => { for(const company of companies) { - if(companyName === company.name) return company; + if(companyName === fuzzyMatchController.normalizeCompanyName(company.name)) return company; }; return null; }, diff --git a/functions/generate_minutes/src/logics/googleDrive.ts b/functions/generate_minutes/src/logics/googleDrive.ts index 509b3bd..eff15a7 100644 --- a/functions/generate_minutes/src/logics/googleDrive.ts +++ b/functions/generate_minutes/src/logics/googleDrive.ts @@ -1,12 +1,12 @@ import { docs_v1, drive_v3, google, sheets_v4 } from "googleapis"; import fs from "fs"; -import { DEBUG, LOG_SHEET_HEADER_VALUES, SHEET_MIMETYPE } from "../../serverConfig"; +import { DEBUG, DOCUMENT_MIMETYPE, LOG_SHEET_HEADER_VALUES, SHEET_MIMETYPE } from "../../serverConfig"; import z from "zod"; +import { Readable } from "stream"; const GOOGLE_DRIVE_FOLDER_ID = process.env.GOOGLE_DRIVE_FOLDER_ID; const SCOPES = ["https://www.googleapis.com/auth/drive", "https://www.googleapis.com/auth/drive.file"] -const MAX_RETRY = 3; export const LogRowDataSchema = z.object({ timestamp: z.string(), @@ -49,10 +49,27 @@ export const googleDriveController = { const docs = google.docs({ version: "v1", auth: auth }); return docs; }, + checkConnection: async (driveClient: 
drive_v3.Drive): Promise => { + try { + const res = await driveClient.files.list({ + corpora: 'drive', + driveId: GOOGLE_DRIVE_FOLDER_ID, + pageSize: 1, + fields: "files(id, name)", + includeItemsFromAllDrives: true, + includeTeamDriveItems: true, + supportsAllDrives: true + }); + console.log("Google Drive connection check successful:", res.data); + return true; + } catch (error) { + console.error("Error checking Google Drive connection:", error); + return false; + } + }, uploadFile: async (driveClient: drive_v3.Drive, filePath: string, folderId: string, fileName: string, contentType: string): Promise => { try { - // console.log("Uploading file to Google Drive:", filePath); const response = await driveClient.files.create({ requestBody: { name: fileName, @@ -62,6 +79,7 @@ export const googleDriveController = { mimeType: contentType, body: fs.createReadStream(filePath), }, + supportsAllDrives: true, }); if(!response.data.id) return null; return response.data.id; @@ -123,6 +141,7 @@ export const googleDriveController = { const file = await driveClient.files.create({ requestBody, + supportsAllDrives: true, // fields: 'id', }); @@ -134,6 +153,35 @@ export const googleDriveController = { return null; } }, + + createMinutesDocument: async(driveClient: drive_v3.Drive, folderId: string, fileName: string, htmlText: string): Promise => { + try { + const requestBody = { + name: fileName, + parents: [folderId], // 作成したフォルダのIDを指定 + mimeType: DOCUMENT_MIMETYPE, + }; + + const media = { + mimeType: 'text/html', + body: Readable.from([htmlText]) + }; + + const file = await driveClient.files.create({ + requestBody, + media, + supportsAllDrives: true, + // fields: 'id', + }); + + console.log('File Id:', file.data); + if (!file.data.id) return null; + return file.data.id; + } catch(err) { + console.error('Error creating file:', err); + return null; + } + }, // CAUTION deleteFile: async (driveClient: drive_v3.Drive, fileId: string) => { try { @@ -147,31 +195,31 @@ export const googleDriveController = { console.error('Error deleting file:', error); } }, - addContentToDocs: async (docsClient: docs_v1.Docs, documentId: string, content: string): Promise => { - try { - const requestBody: docs_v1.Schema$BatchUpdateDocumentRequest = { - requests: [ - { - insertText: { - text: content, - location: { - index: 1, - } - } - } - ] - }; - const response = await docsClient.documents.batchUpdate({ - documentId: documentId, - requestBody: requestBody, - }); - console.log('Content added to document:', response.data); - return true; - } catch (error) { - console.error('Error adding content to document:', error); - return false; - } - }, + // addContentToDocs: async (docsClient: docs_v1.Docs, documentId: string, content: string): Promise => { + // try { + // const requestBody: docs_v1.Schema$BatchUpdateDocumentRequest = { + // requests: [ + // { + // insertText: { + // text: content, + // location: { + // index: 1, + // } + // } + // } + // ] + // }; + // const response = await docsClient.documents.batchUpdate({ + // documentId: documentId, + // requestBody: requestBody, + // }); + // console.log('Content added to document:', response.data); + // return true; + // } catch (error) { + // console.error('Error adding content to document:', error); + // return false; + // } + // }, getLogSheetId: async (driveClient: drive_v3.Drive, sheetsClient: sheets_v4.Sheets, folderId: string, fileName: string): Promise => { try { diff --git a/functions/generate_minutes/src/logics/hubspot.ts b/functions/generate_minutes/src/logics/hubspot.ts 
index e48aa1c..05a5c21 100644 --- a/functions/generate_minutes/src/logics/hubspot.ts +++ b/functions/generate_minutes/src/logics/hubspot.ts @@ -8,7 +8,7 @@ const hubspotClient = new Client({ accessToken: process.env.HUBSPOT_ACCESS_TOKEN export const CompanySchema = z.object({ id: z.string(), name: z.string(), -}) +}); export const OwnerSchema = z.object({ id: z.string(), @@ -19,9 +19,16 @@ export type Company = z.infer; export type Owner = z.infer; export const hubspotController = { - check: async() => { - const response = await hubspotClient.crm.companies.getAll(); - console.log(response.length); + check: async(): Promise => { + try { + const response = await hubspotClient.crm.companies.getAll(); + console.log(response.length); + console.log("HubSpot connection check successful."); + return true; + } catch (error) { + console.error("HubSpot connection check failed:", error); + return false; + } }, getCompanies: async(): Promise => { try { @@ -33,9 +40,9 @@ export const hubspotController = { const response = await hubspotClient.crm.companies.basicApi.getPage(limit, after); // console.log(response.results); const companies: Company[] = response.results.map((company) => CompanySchema.parse({ - id: company.id, - name: company.properties.name, - })); + id: company.id, + name: company.properties.name ?? '', + })); allCompanies.push(...companies); if(response.paging && response.paging.next && response.paging.next.after) { @@ -46,6 +53,7 @@ export const hubspotController = { } return allCompanies; } catch (error) { + console.error("Error fetching companies:", error); return null; } }, diff --git a/functions/generate_minutes/src/logics/process.ts b/functions/generate_minutes/src/logics/process.ts index 0e56103..9675127 100644 --- a/functions/generate_minutes/src/logics/process.ts +++ b/functions/generate_minutes/src/logics/process.ts @@ -10,6 +10,7 @@ import { storageController } from "./storage"; import { CLOUD_STORAGE_MASTER_FOLDER_NAME, DATE_FORMAT, DATETIME_FORMAT, DOCUMENT_MIMETYPE, OWNERS_FILE_NAME, YM_FORMAT } from "../../serverConfig"; import { hubspotController, OwnerSchema } from "./hubspot"; import { fuzzyMatchController } from "./fuzzyMatch"; +import { marked } from "marked"; const VideoInfoSchema = z.looseObject({ id: z.string(), @@ -63,8 +64,9 @@ export const processRequest = async (videoInfo: VideoInfo) => { const sheetsClient = googleDriveController.getSheetsClient(googleAuth); const jstStartsAt = dateController.convertToJst(startsAt); + const meetingDateStr = dateController.getFormattedDate(jstStartsAt, "yyyy年MM月dd日"); const jstEndsAt = dateController.convertToJst(endsAt); - const fileName = fileController.createMinutesFileName(title, hostName, jstStartsAt); + const fileName = fileController.createMinutesFileName(title, hostName, meetingDateStr); const videoUrl = `${MIITEL_URL}app/video/${videoId}`; @@ -74,23 +76,21 @@ export const processRequest = async (videoInfo: VideoInfo) => { const createZip = await fileController.createZip(videoInfo, outputPath, fileName); if(!createZip) throw createCustomError("CREATE_ZIP_FILE_FAILED"); - const logFileId = await callFunctionWithRetry(() => googleDriveController.uploadFile(driveClient, outputPath, MIITEL_REQUEST_LOG_FOLDER_ID, `${fileName}.zip`, "application/zip")); + const logFileId = await callFunctionWithRetry(() => googleDriveController.uploadFile(driveClient, outputPath, MIITEL_REQUEST_LOG_FOLDER_ID, fileName + '.zip', "application/zip")); if(!logFileId) throw createCustomError("UPLOAD_LOG_FAILED"); // ===== Generate Minutes ===== 
const minutes = await callFunctionWithRetry(() => aiController.generateMinutes(speechRecognition)); + console.log(minutes); if (!minutes) throw createCustomError("AI_GENERATION_FAILED"); - let content = `会議履歴URL:${videoUrl}\n`; - content += `担当者:${hostName}\n\n`; - content += minutes; - + const html = await marked.parse(minutes); + let content = `
会議履歴URL:${videoUrl}
`; + content += `
担当者:${hostName}
`; + content += html; // ===== Upload To Google Drive ===== - const documentId = await callFunctionWithRetry(() => googleDriveController.createNewFile(driveClient, GOOGLE_DRIVE_FOLDER_ID, fileName, DOCUMENT_MIMETYPE)); + const documentId = await callFunctionWithRetry(() => googleDriveController.createMinutesDocument(driveClient, GOOGLE_DRIVE_FOLDER_ID, fileName, content)); if (!documentId) throw createCustomError("CREATE_NEW_DOCUMENT_FAILED"); - const addContentResult = await callFunctionWithRetry(() => googleDriveController.addContentToDocs(docsClient, documentId, content)); - if(!addContentResult) throw createCustomError("UPLOAD_MINUTES_FAILED"); - // ===== Create Meeting Log at Hubspot ===== const ownersJson = await callFunctionWithRetry(() => storageController.loadJsonFromGCS(CLOUD_STORAGE_MASTER_FOLDER_NAME, OWNERS_FILE_NAME)); @@ -102,7 +102,7 @@ export const processRequest = async (videoInfo: VideoInfo) => { const extractedCompanyName = fileController.extractCompanyNameFromTitle(title); const matchedCompany = await fuzzyMatchController.searchMatchedCompany(extractedCompanyName); if(matchedCompany) { - const createLogResult = await callFunctionWithRetry(() => hubspotController.createMeetingLog(matchedCompany.id, title, ownerId, minutes, startsAt, endsAt)); + const createLogResult = await callFunctionWithRetry(() => hubspotController.createMeetingLog(matchedCompany.id, title, ownerId, content, startsAt, endsAt)); if(!createLogResult) throw createCustomError("CREATE_MEETING_LOG_FAILED"); } @@ -113,10 +113,9 @@ export const processRequest = async (videoInfo: VideoInfo) => { if(!sheetId) throw createCustomError("GET_SHEET_ID_FAILED"); const currentJstDateTimeStr = dateController.getCurrentJstTime(DATETIME_FORMAT); - const currentJstDateStr = dateController.getCurrentJstTime(DATE_FORMAT); const rowData: LogRowData = LogRowDataSchema.parse({ timestamp: currentJstDateTimeStr, - meetingDate: currentJstDateStr, + meetingDate: meetingDateStr, title: title, matchedCompanyName: matchedCompany?.name ?? 
'', ownerName: hostName, @@ -133,7 +132,7 @@ export const processRequest = async (videoInfo: VideoInfo) => { } }; -export const logUploadProcess = async (videoInfo: VideoInfo) => { +export const logUploadProcess = async (videoInfo: VideoInfo): Promise => { try { const videoId = videoInfo.id; const title = videoInfo.title; @@ -144,7 +143,7 @@ export const logUploadProcess = async (videoInfo: VideoInfo) => { const hostName = videoInfo.host.user_name; const speechRecognition = videoInfo.speech_recognition.raw; - if (accessPermission !== "EVERYONE" || !title.includes("様") || title.includes("社内")) return; + if (accessPermission !== "EVERYONE" || !title.includes("様") || title.includes("社内")) return true; // ===== Init ===== const googleAuth = await googleDriveController.getAuth(); @@ -153,8 +152,9 @@ export const logUploadProcess = async (videoInfo: VideoInfo) => { const sheetsClient = googleDriveController.getSheetsClient(googleAuth); const jstStartsAt = dateController.convertToJst(startsAt); + const meetingDateStr = dateController.getFormattedDate(jstStartsAt, "yyyy年MM月dd日"); const jstEndsAt = dateController.convertToJst(endsAt); - const fileName = fileController.createMinutesFileName(title, hostName, jstStartsAt); + const fileName = fileController.createMinutesFileName(title, hostName, meetingDateStr); const videoUrl = `${MIITEL_URL}app/video/${videoId}`; @@ -166,6 +166,27 @@ export const logUploadProcess = async (videoInfo: VideoInfo) => { const logFileId = await callFunctionWithRetry(() => googleDriveController.uploadFile(driveClient, outputPath, MIITEL_REQUEST_LOG_FOLDER_ID, `${fileName}.zip`, "application/zip")); if(!logFileId) throw createCustomError("UPLOAD_LOG_FAILED"); + fs.unlinkSync(outputPath); + return true; + } catch(error) { + console.log(error); + fs.unlinkSync(outputPath); + return false; + } +}; + +export const testProcess = async () => { + try { + // Google Drive 接続確認 + const googleAuth = await googleDriveController.getAuth(); + const driveClilent = googleDriveController.getDriveClient(googleAuth); + const driveResponse = await googleDriveController.checkConnection(driveClilent); + if(!driveResponse) throw createCustomError("CONNECT_GOOGLE_DRIVE_FAILED"); + + // Hubspot 接続確認 + const hubspotResponse = await hubspotController.check(); + if(!hubspotResponse) throw createCustomError("CONNECT_HUBSPOT_FAILED"); + return; } catch(error) { throw error; } diff --git a/functions/generate_minutes/src/logics/storage.ts b/functions/generate_minutes/src/logics/storage.ts index f6ea454..1f27bdb 100644 --- a/functions/generate_minutes/src/logics/storage.ts +++ b/functions/generate_minutes/src/logics/storage.ts @@ -16,7 +16,7 @@ export const storageController = { }, loadFromGCS: async(folder: string, filename: string): Promise => { const file = bucket.file(`${folder}/${filename}`); - // console.log("loading file:", file.name); + console.log("loading file:", `${folder}/${filename}`); try { const [data] = await file.download(); return zlib.gunzipSync(data).toString("utf-8"); @@ -46,15 +46,20 @@ export const storageController = { }, getFileList: async(): Promise => { try { - const files = await bucket.getFiles({ + const results = await bucket.getFiles({ prefix: 'request_log/', }); - const list = []; - for(const f of files[0]) { - // console.log(f.name) - list.push(f.name); - } - return list; + const files = results[0]; + files.sort((a, b) => { + if(!a.metadata.timeCreated || !b.metadata.timeCreated) return 0; + const timeA = new Date(a.metadata.timeCreated).getTime(); + const timeB = new 
Date(b.metadata.timeCreated).getTime(); + return timeA - timeB; + }); + // for(const f of files[0]) { + // list.push(f.name); + // } + return files.map((f) => f.name); } catch(error) { return null; } diff --git a/functions/generate_minutes/src/stores/errorCodes.ts b/functions/generate_minutes/src/stores/errorCodes.ts index de2c313..eda47ef 100644 --- a/functions/generate_minutes/src/stores/errorCodes.ts +++ b/functions/generate_minutes/src/stores/errorCodes.ts @@ -2,27 +2,36 @@ export const ERROR_DEFINITIONS = { ZOD_FAILED: { code: "E1003", message: "zodのチェックが失敗しました", statusCode: -1 }, - // ログ ZIP の Google Drive アップロード失敗 - UPLOAD_LOG_FAILED: { code: "E3001", message: "ログファイルのアップロードに失敗しました", statusCode: 500 }, - - // AI による議事録生成失敗 - AI_GENERATION_FAILED: { code: "E2001", message: "AIによる議事録生成に失敗しました", statusCode: 500 }, - + + // Google Drive関連 // 議事録(Google Docs)の作成/アップロード失敗 - CREATE_NEW_DOCUMENT_FAILED: { code: "E3002", message: "ドキュメント作成に失敗しました", statusCode: 500 }, - UPLOAD_MINUTES_FAILED: { code: "E3003", message: "議事録のアップロードに失敗しました", statusCode: 500 }, + CONNECT_GOOGLE_DRIVE_FAILED: { code: "E2001", message: "ファイル一覧取得に失敗しました", statusCode: 500 }, + GET_FOLDER_ID_FAILED: { code: "E2002", message: "フォルダID取得に失敗しました", statusCode: 500 }, + GET_SHEET_ID_FAILED: { code: "E2003", message: "スプレッドシートID取得に失敗しました", statusCode: 500 }, + + CREATE_NEW_DOCUMENT_FAILED: { code: "E2004", message: "ドキュメント作成に失敗しました", statusCode: 500 }, + + UPLOAD_MINUTES_FAILED: { code: "E2005", message: "議事録のアップロードに失敗しました", statusCode: 500 }, + UPLOAD_LOG_FAILED: { code: "E2006", message: "ログファイルのアップロードに失敗しました", statusCode: 500 }, + + INSERT_ROW_FAILED: { code: "E2007", message: "シートへのデータ追加に失敗しました", statusCode: 500 }, + // Hubspot関連 // オーナー情報の取得失敗 + CONNECT_HUBSPOT_FAILED: { code: "E3001", message: "ファイル一覧取得に失敗しました", statusCode: 500 }, GET_OWNERS_FAILED: { code: "E3004", message: "オーナー情報の取得に失敗しました", statusCode: 500 }, GET_COMPANIES_FAILED: { code: "E3005", message: "会社情報の取得に失敗しました", statusCode: 500 }, - GET_FOLDER_ID_FAILED: { code: "E3007", message: "フォルダID取得に失敗しました", statusCode: 500 }, - GET_SHEET_ID_FAILED: { code: "E3008", message: "スプレッドシートID取得に失敗しました", statusCode: 500 }, - CREATE_ZIP_FILE_FAILED: { code: "E3009", message: "ZIPファイルの作成に失敗しました", statusCode: 500 }, - INSERT_ROW_FAILED: { code: "E3009", message: "シートへのデータ追加に失敗しました", statusCode: 500 }, + GET_FILES_FAILED: { code: "E3010", message: "ファイルの取得に失敗しました", statusCode: 500 }, CREATE_MEETING_LOG_FAILED: { code: "E3011", message: "ミーティングログ作成に失敗しました", statusCode: 500 }, + + // AI による議事録生成失敗 + AI_GENERATION_FAILED: { code: "E4001", message: "AIによる議事録生成に失敗しました", statusCode: 500 }, + + + CREATE_ZIP_FILE_FAILED: { code: "E3007", message: "ZIPファイルの作成に失敗しました", statusCode: 500 }, } as const; export type ErrorKey = keyof typeof ERROR_DEFINITIONS; diff --git a/terraform/prod/initial/main.tf b/terraform/prod/IAM/main.tf similarity index 60% rename from terraform/prod/initial/main.tf rename to terraform/prod/IAM/main.tf index 6d5566d..06e8c12 100755 --- a/terraform/prod/initial/main.tf +++ b/terraform/prod/IAM/main.tf @@ -30,22 +30,6 @@ resource "google_project_iam_member" "cf_sa_role" { } -# Cloud Workflows用サービスアカウント -resource "google_service_account" "workflows_sa" { - project = var.project_id - account_id = "mrt-cloudworkflows-sa" - display_name = "Cloud Workflows SA" -} - -# 権限を SA に付与 -resource "google_project_iam_member" "wf_cf_role" { - for_each = toset(["roles/cloudfunctions.invoker","roles/run.invoker"]) - project = var.project_id - role = each.value - member = 
"serviceAccount:${google_service_account.workflows_sa.email}" -} - - # API Gateway用サービスアカウント resource "google_service_account" "gateway_sa" { project = var.project_id @@ -62,17 +46,17 @@ resource "google_project_iam_member" "gateway_role" { } -# cloud build用サービスアカウント -resource "google_service_account" "cloudbuild_sa" { +# Scheduler実行用サービスアカウント +resource "google_service_account" "cf_scheduler_sa" { project = var.project_id - account_id = "mrt-cloudbuild-sa" - display_name = "Cloud Build 用サービスアカウント" + account_id = "mrt-scheduler-sa" + display_name = "Cloud Functions 起動用サービスアカウント" } # 権限を SA に付与 -resource "google_project_iam_member" "cloudbuild_role" { - for_each = toset(["roles/cloudbuild.builds.builder","roles/storage.objectAdmin", "roles/artifactregistry.writer", "roles/developerconnect.readTokenAccessor", "roles/cloudfunctions.developer","roles/workflows.admin", "roles/iam.serviceAccountUser"]) +resource "google_project_iam_member" "scheduler_role" { + for_each = toset(["roles/cloudfunctions.invoker","roles/run.invoker"]) project = var.project_id role = each.value - member = "serviceAccount:${google_service_account.cloudbuild_sa.email}" -} \ No newline at end of file + member = "serviceAccount:${google_service_account.cf_scheduler_sa.email}" +} diff --git a/terraform/prod/scheduler/main.tf b/terraform/prod/scheduler/main.tf index e7441a4..0833182 100755 --- a/terraform/prod/scheduler/main.tf +++ b/terraform/prod/scheduler/main.tf @@ -10,37 +10,22 @@ variable "region" { variable "function_name" { type = string - default = "mrt-create-log-sheet" + default = "generate-minutes" } -# Scheduler実行用サービスアカウント -resource "google_service_account" "cf_scheduler_sa" { - project = var.project_id - account_id = "mrt-scheduler-sa" - display_name = "Cloud Functions 起動用サービスアカウント" -} -# 権限を SA に付与 -resource "google_project_iam_member" "scheduler_role" { - for_each = toset(["roles/cloudfunctions.invoker","roles/run.invoker"]) - project = var.project_id - role = each.value - member = "serviceAccount:${google_service_account.cf_scheduler_sa.email}" -} - - -# 毎月1日0時に Function を実行する Scheduler ジョブ -resource "google_cloud_scheduler_job" "monthly_cf_trigger" { +# 毎日3時に Function を実行する Scheduler ジョブ +resource "google_cloud_scheduler_job" "daily_cf_trigger" { project = var.project_id - name = "monthly-cf-trigger" - description = "Invoke Cloud Function on the 1st of each month at 00:00" + name = "daily-cf-trigger" + description = "Invoke Cloud Function everyday at 03:00" region = var.region - schedule = "0 0 1 * *" + schedule = "0 3 * * *" time_zone = "Asia/Tokyo" http_target { - uri = "https://${var.region}-${var.project_id}.cloudfunctions.net/${var.function_name}" + uri = "https://${var.region}-${var.project_id}.cloudfunctions.net/${var.function_name}/api/dailyBatch" http_method = "POST" oidc_token { service_account_email = google_service_account.cf_scheduler_sa.email