Compare commits

...

6 commits

Author SHA1 Message Date
6454e1b46b 20251224 2025-12-24 11:36:34 +09:00
1259ba76c9 Deploy to test environment 2025-12-09 14:57:17 +09:00
bb072cc91c api call retry 2025-12-08 14:22:40 +09:00
c004f6c34f add re-execute API; error handling 2025-12-05 16:01:59 +09:00
395fba645d python -> node.js 2025-12-05 14:12:11 +09:00
092f2ec0f3 20251125 save 2025-11-25 14:54:01 +09:00
75 changed files with 1803 additions and 2079 deletions

18
.gitignore vendored
View file

@ -1,14 +1,10 @@
handle-company-webhook/
terraform.*
.terraform*
IAM/
test/
venv/
__pycache__/
*.csv
request.json
node_modules/
dist/
.env*
credentials*
package-lock.json
*.sh
log/

View file

@ -1,16 +0,0 @@
#!/bin/bash
# API endpoint URL
API_URL="https://sales-tool-gw-dev-ex1cujb.an.gateway.dev/trigger-minutes-workflow-from-miitel"
# API key (attached as a request header)
API_KEY="AIzaSyBVJOtvJTB4noAfUGEyMhCRqsF5yfypENc"
# Request body
JSON_FILE="request.json"
# Run the curl command
curl -X POST "$API_URL" \
-H "Content-Type: application/json" \
-H "x-api-key: $API_KEY" \
-d @"$JSON_FILE"

View file

@ -7,7 +7,7 @@ PROJECT_ID="datacom-poc"
API_NAME="sales-tool-api"
# Set the project
gcloud auth application-default set-quota-project dmiru-dev
gcloud auth application-default set-quota-project $PROJECT_ID
gcloud config set project $PROJECT_ID
# Create the API Gateway

View file

@ -2,11 +2,11 @@
# Environment variables
API_NAME="sales-tool-api"
API_CONFIG_NAME="sales-tool-gw-dev-conf-20250619"
GATEWAY_NAME="sales-tool-gw-dev"
API_CONFIG_NAME="sales-tool-gw-dev-conf-20251209"
GATEWAY_NAME="sales-tool-gw-dev-20251209"
OPENAPI_SPEC="openapi_dev.yaml"
PROJECT_ID="datacom-poc"
SERVICE_ACCOUNT="api-gateway-mpos@datacom-poc.iam.gserviceaccount.com"
SERVICE_ACCOUNT="mrt-apigateway-sa-devtest@datacom-poc.iam.gserviceaccount.com"
LOCATION="asia-northeast1"
gcloud auth application-default set-quota-project $PROJECT_ID

View file

@ -5,10 +5,6 @@ info:
version: '1.0.0'
schemes:
- 'https'
host: 'crate-minutes-gw-a8slsa47.an.gateway.dev'
x-google-endpoints:
- name: 'crate-minutes-gw-a8slsa47.an.gateway.dev'
allowCors: True
paths:
/create-minutes:
post:
@ -70,6 +66,321 @@ paths:
Access-Control-Allow-Headers:
type: string
default: 'Content-Type, x-api-key'
/miitel:
post:
description: 'Miitel Webhook Processor'
operationId: 'miitel'
x-google-backend:
address: https://asia-northeast1-rational-timing-443808-u0.cloudfunctions.net/generate_minutes/api/miitel
path_translation: CONSTANT_ADDRESS
jwt_audience: https://asia-northeast1-rational-timing-443808-u0.cloudfunctions.net/generate_minutes
deadline: 600
consumes:
- application/json
produces:
- application/json
parameters:
- in: body
name: body
description: JSON payload
required: false
schema:
type: object
additionalProperties: true
responses:
200:
description: 'OK'
schema:
type: object
additionalProperties: true
401:
description: 'Auth Error'
schema:
type: object
properties:
error:
type: string
400:
description: 'Error'
schema:
type: object
properties:
error:
type: string
500:
description: 'Error'
schema:
type: object
properties:
error:
type: string
security:
- APIKeyHeader: []
/dailyBatch:
post:
description: 'get companies and owners'
operationId: 'dailyBatch'
x-google-backend:
address: https://asia-northeast1-rational-timing-443808-u0.cloudfunctions.net/generate_minutes/api/dailyBatch
path_translation: CONSTANT_ADDRESS
jwt_audience: https://asia-northeast1-rational-timing-443808-u0.cloudfunctions.net/generate_minutes
deadline: 600
consumes:
- application/json
produces:
- application/json
parameters:
- in: body
name: body
description: JSON payload
required: false
schema:
type: object
additionalProperties: true
responses:
200:
description: 'OK'
schema:
type: object
additionalProperties: true
401:
description: 'Auth Error'
schema:
type: object
properties:
error:
type: string
400:
description: 'Error'
schema:
type: object
properties:
error:
type: string
500:
description: 'Error'
schema:
type: object
properties:
error:
type: string
security:
- APIKeyHeader: []
/getLog:
post:
description: 'get log'
operationId: 'getLog'
x-google-backend:
address: https://asia-northeast1-rational-timing-443808-u0.cloudfunctions.net/generate_minutes/api/getLog
path_translation: CONSTANT_ADDRESS
jwt_audience: https://asia-northeast1-rational-timing-443808-u0.cloudfunctions.net/generate_minutes
deadline: 600
consumes:
- application/json
produces:
- application/json
parameters:
- in: body
name: body
description: JSON payload
required: false
schema:
type: object
additionalProperties: true
responses:
200:
description: 'OK'
schema:
type: object
additionalProperties: true
401:
description: 'Auth Error'
schema:
type: object
properties:
error:
type: string
400:
description: 'Error'
schema:
type: object
properties:
error:
type: string
500:
description: 'Error'
schema:
type: object
properties:
error:
type: string
security:
- APIKeyHeader: []
/reExecute:
post:
description: ''
operationId: 'reExecute'
x-google-backend:
address: https://asia-northeast1-rational-timing-443808-u0.cloudfunctions.net/generate_minutes/api/reExecute
path_translation: CONSTANT_ADDRESS
jwt_audience: https://asia-northeast1-rational-timing-443808-u0.cloudfunctions.net/generate_minutes
deadline: 600
consumes:
- application/json
produces:
- application/json
parameters:
- in: body
name: body
description: JSON payload
required: false
schema:
type: object
additionalProperties: true
responses:
200:
description: 'OK'
schema:
type: object
additionalProperties: true
401:
description: 'Auth Error'
schema:
type: object
properties:
error:
type: string
400:
description: 'Error'
schema:
type: object
properties:
error:
type: string
500:
description: 'Error'
schema:
type: object
properties:
error:
type: string
security:
- APIKeyHeader: []
/test:
post:
description: 'test'
operationId: 'test'
x-google-backend:
address: https://asia-northeast1-rational-timing-443808-u0.cloudfunctions.net/generate_minutes/api/test
path_translation: CONSTANT_ADDRESS
jwt_audience: https://asia-northeast1-rational-timing-443808-u0.cloudfunctions.net/generate_minutes
deadline: 600
consumes:
- application/json
produces:
- application/json
parameters:
- in: body
name: body
description: JSON payload
required: false
schema:
type: object
additionalProperties: true
responses:
200:
description: 'OK'
schema:
type: object
additionalProperties: true
401:
description: 'Auth Error'
schema:
type: object
properties:
error:
type: string
400:
description: 'Error'
schema:
type: object
properties:
error:
type: string
500:
description: 'Error'
schema:
type: object
properties:
error:
type: string
/alertTest:
post:
description: 'alertTest'
operationId: 'alertTest'
x-google-backend:
address: https://asia-northeast1-rational-timing-443808-u0.cloudfunctions.net/generate_minutes/api/alertTest
path_translation: CONSTANT_ADDRESS
jwt_audience: https://asia-northeast1-rational-timing-443808-u0.cloudfunctions.net/generate_minutes
deadline: 600
consumes:
- application/json
produces:
- application/json
parameters:
- in: body
name: body
description: JSON payload
required: false
schema:
type: object
additionalProperties: true
responses:
200:
description: 'OK'
schema:
type: object
additionalProperties: true
401:
description: 'Auth Error'
schema:
type: object
properties:
error:
type: string
500:
description: 'Error'
schema:
type: object
properties:
error:
type: string
security:
- APIKeyHeader: []
options:
summary: 'CORS support'
operationId: 'test-options'
responses:
204:
description: 'CORS preflight'
headers:
Access-Control-Allow-Origin:
type: string
default: '*'
Access-Control-Allow-Methods:
type: string
default: 'GET, POST, OPTIONS'
Access-Control-Allow-Headers:
type: string
default: 'Content-Type, x-api-key'
securityDefinitions:
APIKeyHeader:
type: apiKey
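For reference, the routes added above (/miitel, /dailyBatch, /getLog, /reExecute, /test, /alertTest) are all POST endpoints that proxy to the generate_minutes Cloud Function with a 600 s deadline, most of them protected by the APIKeyHeader scheme. A minimal sketch of calling one of them through the gateway follows; the gateway hostname, API key, and request fields are placeholders, not values taken from this change.

// Sketch only: invoke the new re-execute route through API Gateway.
// GATEWAY_HOST and API_KEY are placeholders; the body shape is illustrative,
// since the spec above accepts any JSON object (additionalProperties: true).
const GATEWAY_HOST = "https://<your-gateway-host>.an.gateway.dev";
const API_KEY = process.env.API_KEY ?? "<api-key>";

async function reExecute(payload: Record<string, unknown>): Promise<unknown> {
  const res = await fetch(`${GATEWAY_HOST}/reExecute`, {
    method: "POST",
    headers: {
      "Content-Type": "application/json",
      "x-api-key": API_KEY,
    },
    body: JSON.stringify(payload),
  });
  if (!res.ok) throw new Error(`reExecute failed: ${res.status}`);
  return res.json(); // a 200 response returns an arbitrary JSON object
}

// Example: re-run processing for a hypothetical request id.
reExecute({ id: "example-request-id" }).catch(console.error);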

View file

@ -6,13 +6,191 @@ info:
schemes:
- 'https'
paths:
/trigger-minutes-workflow-from-miitel:
/miitel:
post:
description: 'Workflow invocation handler'
operationId: 'trigger-minutes-workflow-from-miitel'
description: 'Miitel Webhook Processor'
operationId: 'miitel'
x-google-backend:
address: https://asia-northeast1-datacom-poc.cloudfunctions.net/mrt-trigger-minutes-workflow-from-miitel
address: https://asia-northeast1-datacom-poc.cloudfunctions.net/generate_minutes/api/miitel
path_translation: CONSTANT_ADDRESS
jwt_audience: https://asia-northeast1-datacom-poc.cloudfunctions.net/generate_minutes
deadline: 600
consumes:
- application/json
produces:
- application/json
parameters:
- in: body
name: body
description: JSON payload
required: false
schema:
type: object
additionalProperties: true
responses:
200:
description: 'OK'
schema:
type: object
additionalProperties: true
401:
description: 'Auth Error'
schema:
type: object
properties:
error:
type: string
500:
description: 'Error'
schema:
type: object
properties:
error:
type: string
security:
- APIKeyHeader: []
/dailyBatch:
post:
description: 'get companies and owners'
operationId: 'dailyBatch'
x-google-backend:
address: https://asia-northeast1-datacom-poc.cloudfunctions.net/generate_minutes/api/dailyBatch
path_translation: CONSTANT_ADDRESS
jwt_audience: https://asia-northeast1-datacom-poc.cloudfunctions.net/generate_minutes
deadline: 600
consumes:
- application/json
produces:
- application/json
parameters:
- in: body
name: body
description: JSON payload
required: false
schema:
type: object
additionalProperties: true
responses:
200:
description: 'OK'
schema:
type: object
additionalProperties: true
401:
description: 'Auth Error'
schema:
type: object
properties:
error:
type: string
500:
description: 'Error'
schema:
type: object
properties:
error:
type: string
security:
- APIKeyHeader: []
/getLog:
post:
description: 'get log'
operationId: 'getLog'
x-google-backend:
address: https://asia-northeast1-datacom-poc.cloudfunctions.net/generate_minutes/api/getLog
path_translation: CONSTANT_ADDRESS
jwt_audience: https://asia-northeast1-datacom-poc.cloudfunctions.net/generate_minutes
deadline: 600
consumes:
- application/json
produces:
- application/json
parameters:
- in: body
name: body
description: JSON payload
required: false
schema:
type: object
additionalProperties: true
responses:
200:
description: 'OK'
schema:
type: object
additionalProperties: true
401:
description: 'Auth Error'
schema:
type: object
properties:
error:
type: string
500:
description: 'Error'
schema:
type: object
properties:
error:
type: string
security:
- APIKeyHeader: []
/reExecute:
post:
description: ''
operationId: 'reExecute'
x-google-backend:
address: https://asia-northeast1-datacom-poc.cloudfunctions.net/generate_minutes/api/reExecute
path_translation: CONSTANT_ADDRESS
jwt_audience: https://asia-northeast1-datacom-poc.cloudfunctions.net/generate_minutes
deadline: 600
consumes:
- application/json
produces:
- application/json
parameters:
- in: body
name: body
description: JSON payload
required: false
schema:
type: object
additionalProperties: true
responses:
200:
description: 'OK'
schema:
type: object
additionalProperties: true
401:
description: 'Auth Error'
schema:
type: object
properties:
error:
type: string
500:
description: 'Error'
schema:
type: object
properties:
error:
type: string
security:
- APIKeyHeader: []
/test:
post:
description: 'test'
operationId: 'test'
x-google-backend:
address: https://asia-northeast1-datacom-poc.cloudfunctions.net/generate_minutes/api/test
path_translation: CONSTANT_ADDRESS
jwt_audience: https://asia-northeast1-datacom-poc.cloudfunctions.net/generate_minutes
deadline: 600
consumes:
- application/json
produces:
@ -49,10 +227,7 @@ paths:
- APIKeyHeader: []
options:
summary: 'CORS support'
operationId: 'trigger-minutes-workflow-from-miitel-options'
x-google-backend:
address: https://asia-northeast1-datacom-poc.cloudfunctions.net/mrttrigger-minutes-workflow-from-miitel
path_translation: CONSTANT_ADDRESS
operationId: 'test-options'
responses:
204:
description: 'CORS preflight'

View file

@ -1,196 +0,0 @@
substitutions:
_ENV: 'dev'
_CF_SERVICE_ACCOUNT: 'mrt-cloudfunctions-sa-devtest'
_CW_SERVICE_ACCOUNT: 'mrt-cloudworkflows-sa-devtest'
options:
logging: CLOUD_LOGGING_ONLY
steps:
# 会社一覧取得
- id: 'gcloud functions deploy mrt-export-companies-to-gcs'
name: gcr.io/cloud-builders/gcloud
dir: 'functions/export-companies-to-gcs'
args: [
'functions',
'deploy',
'mrt-export-companies-to-gcs',
'--gen2',
'--runtime=python312',
'--region=asia-northeast1',
'--source=./source', # dir で切り替えているので「.」
'--entry-point=handle_request', # 変更する場合はここ
'--trigger-http',
'--allow-unauthenticated',
'--service-account=$_CF_SERVICE_ACCOUNT@$PROJECT_ID.iam.gserviceaccount.com',
'--env-vars-file=.env_dev',
'--project=$PROJECT_ID',
'--quiet',
]
waitFor: ['-']
# 担当者一覧取得
- name: gcr.io/cloud-builders/gcloud
dir: 'functions/export-owners-to-gcs'
args: [
'functions',
'deploy',
'mrt-export-owners-to-gcs',
'--gen2',
'--runtime=python312',
'--region=asia-northeast1',
'--source=./source', # dir で切り替えているので「.」
'--entry-point=handle_request', # 変更する場合はここ
'--trigger-http',
'--no-allow-unauthenticated',
'--service-account=$_CF_SERVICE_ACCOUNT@$PROJECT_ID.iam.gserviceaccount.com',
'--env-vars-file=.env_dev',
'--project=$PROJECT_ID',
'--quiet',
]
waitFor: ['-']
# スプレッドシート作成
- name: gcr.io/cloud-builders/gcloud
dir: 'functions/create-log-sheet'
args: [
'functions',
'deploy',
'mrt-create-log-sheet',
'--gen2',
'--runtime=python312',
'--region=asia-northeast1',
'--source=./source', # dir で切り替えているので「.」
'--entry-point=handle_request', # 変更する場合はここ
'--trigger-http',
'--no-allow-unauthenticated',
'--service-account=$_CF_SERVICE_ACCOUNT@$PROJECT_ID.iam.gserviceaccount.com',
'--env-vars-file=.env_dev',
'--project=$PROJECT_ID',
'--quiet',
]
waitFor: ['-']
# ワークフロー呼び出し関数
- name: gcr.io/cloud-builders/gcloud
dir: 'functions/trigger-minutes-workflow-from-miitel'
args: [
'functions',
'deploy',
'mrt-trigger-minutes-workflow-from-miitel',
'--gen2',
'--runtime=python312',
'--region=asia-northeast1',
'--source=./source', # dir で切り替えているので「.」
'--entry-point=handle_request', # 変更する場合はここ
'--trigger-http',
'--no-allow-unauthenticated',
'--service-account=$_CF_SERVICE_ACCOUNT@$PROJECT_ID.iam.gserviceaccount.com',
'--env-vars-file=.env_dev',
'--project=$PROJECT_ID',
'--quiet',
]
waitFor: ['-']
# 議事録作成関数
- name: gcr.io/cloud-builders/gcloud
dir: 'functions/generate-meeting-minutes'
args: [
'functions',
'deploy',
'mrt-generate-meeting-minutes',
'--gen2',
'--runtime=python312',
'--region=asia-northeast1',
'--source=./source', # dir で切り替えているので「.」
'--entry-point=handle_request', # 変更する場合はここ
'--trigger-http',
'--cpu=0.5',
'--memory=1Gi',
'--no-allow-unauthenticated',
'--service-account=$_CF_SERVICE_ACCOUNT@$PROJECT_ID.iam.gserviceaccount.com',
'--env-vars-file=.env_dev',
'--project=$PROJECT_ID',
'--timeout=10m',
'--quiet',
]
waitFor: ['-']
# 議事録をドライブへアップロードする関数
- name: gcr.io/cloud-builders/gcloud
dir: 'functions/upload-minutes-to-drive'
args: [
'functions',
'deploy',
'mrt-upload-minutes-to-drive',
'--gen2',
'--runtime=python312',
'--region=asia-northeast1',
'--source=./source', # dir で切り替えているので「.」
'--entry-point=handle_request', # 変更する場合はここ
'--trigger-http',
'--cpu=0.5',
'--memory=1Gi',
'--no-allow-unauthenticated',
'--service-account=$_CF_SERVICE_ACCOUNT@$PROJECT_ID.iam.gserviceaccount.com',
'--env-vars-file=.env_dev',
'--project=$PROJECT_ID',
'--quiet',
]
waitFor: ['-']
# Hubspot連携関数
- name: gcr.io/cloud-builders/gcloud
dir: 'functions/create-hubspot-meeting-log'
args: [
'functions',
'deploy',
'mrt-create-hubspot-meeting-log',
'--gen2',
'--runtime=python312',
'--region=asia-northeast1',
'--source=./source', # dir で切り替えているので「.」
'--entry-point=handle_request', # 変更する場合はここ
'--trigger-http',
'--no-allow-unauthenticated',
'--service-account=$_CF_SERVICE_ACCOUNT@$PROJECT_ID.iam.gserviceaccount.com',
'--env-vars-file=.env_dev',
'--project=$PROJECT_ID',
'--quiet',
]
waitFor: ['-']
# スプレッドシートへ記録
- name: gcr.io/cloud-builders/gcloud
dir: 'functions/append-log-to-sheet'
args: [
'functions',
'deploy',
'mrt-append-log-to-sheet',
'--gen2',
'--runtime=python312',
'--region=asia-northeast1',
'--source=./source', # dir で切り替えているので「.」
'--entry-point=handle_request', # 変更する場合はここ
'--trigger-http',
'--no-allow-unauthenticated',
'--service-account=$_CF_SERVICE_ACCOUNT@$PROJECT_ID.iam.gserviceaccount.com',
'--env-vars-file=.env_dev',
'--project=$PROJECT_ID',
'--quiet',
]
waitFor: ['-']
# ワークフロー
- name: gcr.io/cloud-builders/gcloud
dir: 'workflows/workflow-create-minutes'
args:
[
'workflows',
'deploy',
'mrt-workflow-create-minutes',
'--location=asia-northeast1',
'--source=main.yaml',
'--service-account=$_CW_SERVICE_ACCOUNT@$PROJECT_ID.iam.gserviceaccount.com',
'--quiet',
]

View file

@ -1,193 +0,0 @@
substitutions:
_ENV: 'prod'
_CF_SERVICE_ACCOUNT: 'mrt-cloudfunctions-sa'
_CW_SERVICE_ACCOUNT: 'mrt-cloudworkflows-sa'
steps:
# 会社一覧取得
- id: 'gcloud functions deploy mrt-export-companies-to-gcs'
name: gcr.io/cloud-builders/gcloud
dir: 'functions/export-companies-to-gcs'
args: [
'functions',
'deploy',
'mrt-export-companies-to-gcs',
'--gen2',
'--runtime=python312',
'--region=asia-northeast1',
'--source=./source', # dir で切り替えているので「.」
'--entry-point=handle_request', # 変更する場合はここ
'--trigger-http',
'--allow-unauthenticated',
'--service-account=$_CF_SERVICE_ACCOUNT@$PROJECT_ID.iam.gserviceaccount.com',
'--env-vars-file=.env_prod',
'--project=$PROJECT_ID',
'--quiet',
]
waitFor: ['-']
# 担当者一覧取得
- name: gcr.io/cloud-builders/gcloud
dir: 'functions/export-owners-to-gcs'
args: [
'functions',
'deploy',
'mrt-export-owners-to-gcs',
'--gen2',
'--runtime=python312',
'--region=asia-northeast1',
'--source=./source', # dir で切り替えているので「.」
'--entry-point=handle_request', # 変更する場合はここ
'--trigger-http',
'--no-allow-unauthenticated',
'--service-account=$_CF_SERVICE_ACCOUNT@$PROJECT_ID.iam.gserviceaccount.com',
'--env-vars-file=.env_prod',
'--project=$PROJECT_ID',
'--quiet',
]
waitFor: ['-']
# スプレッドシート作成
- name: gcr.io/cloud-builders/gcloud
dir: 'functions/create-log-sheet'
args: [
'functions',
'deploy',
'mrt-create-log-sheet',
'--gen2',
'--runtime=python312',
'--region=asia-northeast1',
'--source=./source', # dir で切り替えているので「.」
'--entry-point=handle_request', # 変更する場合はここ
'--trigger-http',
'--no-allow-unauthenticated',
'--service-account=$_CF_SERVICE_ACCOUNT@$PROJECT_ID.iam.gserviceaccount.com',
'--env-vars-file=.env_prod',
'--project=$PROJECT_ID',
'--quiet',
]
waitFor: ['-']
# ワークフロー呼び出し関数
- name: gcr.io/cloud-builders/gcloud
dir: 'functions/trigger-minutes-workflow-from-miitel'
args: [
'functions',
'deploy',
'mrt-trigger-minutes-workflow-from-miitel',
'--gen2',
'--runtime=python312',
'--region=asia-northeast1',
'--source=./source', # dir で切り替えているので「.」
'--entry-point=handle_request', # 変更する場合はここ
'--trigger-http',
'--no-allow-unauthenticated',
'--service-account=$_CF_SERVICE_ACCOUNT@$PROJECT_ID.iam.gserviceaccount.com',
'--env-vars-file=.env_prod',
'--project=$PROJECT_ID',
'--quiet',
]
waitFor: ['-']
# 議事録作成関数
- name: gcr.io/cloud-builders/gcloud
dir: 'functions/generate-meeting-minutes'
args: [
'functions',
'deploy',
'mrt-generate-meeting-minutes',
'--gen2',
'--runtime=python312',
'--region=asia-northeast1',
'--source=./source', # dir で切り替えているので「.」
'--entry-point=handle_request', # 変更する場合はここ
'--trigger-http',
'--cpu=0.5',
'--memory=1Gi',
'--no-allow-unauthenticated',
'--service-account=$_CF_SERVICE_ACCOUNT@$PROJECT_ID.iam.gserviceaccount.com',
'--env-vars-file=.env_prod',
'--project=$PROJECT_ID',
'--timeout=10m',
'--quiet',
]
waitFor: ['-']
# 議事録をドライブへアップロードする関数
- name: gcr.io/cloud-builders/gcloud
dir: 'functions/upload-minutes-to-drive'
args: [
'functions',
'deploy',
'mrt-upload-minutes-to-drive',
'--gen2',
'--runtime=python312',
'--region=asia-northeast1',
'--source=./source', # dir で切り替えているので「.」
'--entry-point=handle_request', # 変更する場合はここ
'--trigger-http',
'--cpu=0.5',
'--memory=1Gi',
'--no-allow-unauthenticated',
'--service-account=$_CF_SERVICE_ACCOUNT@$PROJECT_ID.iam.gserviceaccount.com',
'--env-vars-file=.env_prod',
'--project=$PROJECT_ID',
'--quiet',
]
waitFor: ['-']
# Hubspot連携関数
- name: gcr.io/cloud-builders/gcloud
dir: 'functions/create-hubspot-meeting-log'
args: [
'functions',
'deploy',
'mrt-create-hubspot-meeting-log',
'--gen2',
'--runtime=python312',
'--region=asia-northeast1',
'--source=./source', # dir で切り替えているので「.」
'--entry-point=handle_request', # 変更する場合はここ
'--trigger-http',
'--no-allow-unauthenticated',
'--service-account=$_CF_SERVICE_ACCOUNT@$PROJECT_ID.iam.gserviceaccount.com',
'--env-vars-file=.env_prod',
'--project=$PROJECT_ID',
'--quiet',
]
waitFor: ['-']
# スプレッドシートへ記録
- name: gcr.io/cloud-builders/gcloud
dir: 'functions/append-log-to-sheet'
args: [
'functions',
'deploy',
'mrt-append-log-to-sheet',
'--gen2',
'--runtime=python312',
'--region=asia-northeast1',
'--source=./source', # dir で切り替えているので「.」
'--entry-point=handle_request', # 変更する場合はここ
'--trigger-http',
'--no-allow-unauthenticated',
'--service-account=$_CF_SERVICE_ACCOUNT@$PROJECT_ID.iam.gserviceaccount.com',
'--env-vars-file=.env_prod',
'--project=$PROJECT_ID',
'--quiet',
]
waitFor: ['-']
# ワークフロー
- name: gcr.io/cloud-builders/gcloud
dir: 'workflows/workflow-create-minutes'
args:
[
'workflows',
'deploy',
'mrt-workflow-create-minutes',
'--location=asia-northeast1',
'--source=main.yaml',
'--service-account=$_CW_SERVICE_ACCOUNT@$PROJECT_ID.iam.gserviceaccount.com',
'--quiet',
]

View file

@ -1,5 +0,0 @@
KEY_PATH=projects/32472615575/secrets/sa-access-google-drive-key
LOG_FOLDER_ID=1IZToaM9K9OJXrgV05aLO5k2ZCXpdlJzX
MEETING_FOLDER_ID=1cCDJKusfrlDrJe2yHCR8pCHJXRqX-4Hw
HUBSPOT_COMPANY_URL=https://app-na2.hubspot.com/contacts/242960467/record/0-2
MODE=dev

View file

@ -1,5 +0,0 @@
KEY_PATH: projects/32472615575/secrets/sa-access-google-drive-key
LOG_FOLDER_ID: 1IZToaM9K9OJXrgV05aLO5k2ZCXpdlJzX
MEETING_FOLDER_ID: 1cCDJKusfrlDrJe2yHCR8pCHJXRqX-4Hw
HUBSPOT_COMPANY_URL: https://app-na2.hubspot.com/contacts/242960467/record/0-2
MODE: dev

View file

@ -1,5 +0,0 @@
KEY_PATH: projects/570987459910/secrets/sa-create-minutes-key
LOG_FOLDER_ID: 1arL6AxpvA7N6Umg4wdrdAcRWBdKc-Jfb
MEETING_FOLDER_ID: 0AGT_1dSq66qYUk9PVA
HUBSPOT_COMPANY_URL: https://app.hubspot.com/contacts/22400567/record/0-2
MODE: production

View file

@ -1,33 +0,0 @@
#!/bin/bash
# プロジェクトIDを設定
PROJECT_ID="datacom-poc"
# デプロイする関数名
FUNCTION_NAME="mrt-append-log-to-sheet"
# 関数のエントリポイント
ENTRY_POINT="handle_request"
# ランタイム
RUNTIME="python312"
# リージョン
REGION="asia-northeast1"
# 環境変数ファイル
ENV_VARS_FILE=".env_dev"
gcloud auth application-default set-quota-project $PROJECT_ID
gcloud config set project $PROJECT_ID
# デプロイコマンド
gcloud functions deploy $FUNCTION_NAME \
--gen2 \
--region $REGION \
--runtime $RUNTIME \
--source=./source \
--trigger-http \
--no-allow-unauthenticated \
--entry-point $ENTRY_POINT \
--env-vars-file $ENV_VARS_FILE

View file

@ -1,267 +0,0 @@
import functions_framework
from google.cloud import secretmanager
from google.oauth2 import service_account
from googleapiclient.discovery import build
from googleapiclient.errors import HttpError
import json
import os
from datetime import datetime, timezone, timedelta
sm_client = secretmanager.SecretManagerServiceClient()
SCOPES = ["https://www.googleapis.com/auth/drive", "https://www.googleapis.com/auth/drive.file"]
HEADER_VALUES = ["タイムスタンプ","商談日", "タイトル", "登録先企業","担当者", "ミーティングURL", "議事録URL", "HubSpot会社概要URL"]
@functions_framework.http
def handle_request(request):
# POSTリクエストの処理
if request.method != 'POST':
return ('', 405, {'Allow': 'POST', 'Content-Type': 'application/json'}) # メソッドがPOSTでない場合は405エラーを返す
"""Shows basic usage of the Drive Activity API.
Prints information about the last 10 events that occured the user's Drive.
"""
try:
request_json = request.get_json()
print(request_json)
title = request_json['title'] # 会議タイトル
document_id = request_json['document_id'] # 議事録ファイルのID
matched_company_id = request_json['matched_company_id'] # マッチした会社ID
matched_company_name = request_json['matched_company_name'] # マッチした会社名
host_name = request_json['host_name'] # ホストユーザー名
video_url = request_json['video_url'] # 会議履歴URL
starts_at = request_json['starts_at'] # 開始日時
log_folder_id = os.getenv("LOG_FOLDER_ID") # 共有ドライブID
meeting_folder_id = os.getenv("MEETING_FOLDER_ID") # ミーティングフォルダID
hubspot_company_url = os.getenv("HUBSPOT_COMPANY_URL") # HubSpotの会社情報URL
mode = os.getenv("MODE") # モードdevまたはprod
service_account_info = get_service_account_info()
# 認証
credentials = get_credentials(service_account_info)
# APIクライアントの構築
drive_service = build("drive", "v3", credentials=credentials)
sheet_service = build("sheets", "v4", credentials=credentials)
# 現在日時をJSTに変換
jst_now = datetime.now(timezone.utc).astimezone(timezone(timedelta(hours=9)))
# JSTの現在日時を文字列に変換
ym_str = jst_now.strftime("%Y%m")
y_str = jst_now.strftime("%Y")
# 年別のフォルダを検索
target_folder = get_directory_files_dev(drive_service, log_folder_id, y_str) if mode == "dev" else get_directory_files_prod(drive_service, meeting_folder_id, log_folder_id, y_str)
print("target_folder", target_folder)
year_folder_id = None
if not target_folder:
# フォルダが存在しない場合は新規作成
year_folder_id = create_new_folder(drive_service, log_folder_id, y_str)
else:
# フォルダが存在する場合はそのIDを使用
year_folder_id = target_folder[0]['id']
print("年別のフォルダID:", year_folder_id)
# スプレッドシートを検索
target_files = get_directory_files_dev(drive_service, year_folder_id, ym_str) if mode == "dev" else get_directory_files_prod(drive_service, meeting_folder_id, year_folder_id, ym_str)
print("スプレッドシート", target_files)
if not target_files:
print('not found')
# スプレッドシートを作成
spreadsheet_id = create_new_spreadsheet(drive_service, year_folder_id, ym_str)
print("スプレッドシートID:", spreadsheet_id)
# 注意事項追加
append_log_to_sheet(sheet_service, spreadsheet_id, ["※シート名変更厳禁"])
# ヘッダーを追加
append_log_to_sheet(sheet_service, spreadsheet_id, HEADER_VALUES)
else:
print('found')
# ファイルIDを取得
spreadsheet_id = target_files[0]['id']
documnet_url = f"https://docs.google.com/document/d/{document_id}/edit" if document_id else ""
hubspot_url = f"{hubspot_company_url}/{matched_company_id}" if matched_company_id else ""
# テストログを追加
row_data = [jst_now.strftime("%Y-%m-%d %H:%M:%S"),
convert_to_jst_ymd(starts_at),
title,
matched_company_name,
host_name,
video_url,
documnet_url,
hubspot_url
]
append_log_to_sheet(sheet_service, spreadsheet_id, row_data)
print("ログを追加しました:", row_data)
return (json.dumps({"status": "success"}, ensure_ascii=False), 200, {"Content-Type": "application/json"})
except HttpError as error:
# TODO(developer) - Handleerrors from drive activity API.
print(f"An error occurred: {error}")
#
# SecretManagerから秘密鍵を取得
#
def get_service_account_info():
key_path = os.getenv('KEY_PATH') + "/versions/1"
# 秘密鍵取得
response = sm_client.access_secret_version(name=key_path)
# 秘密鍵の値をデコード
secret_key = response.payload.data.decode("UTF-8")
return json.loads(secret_key)
# Google Drive認証
def get_credentials(service_account_info):
credentials = service_account.Credentials.from_service_account_info(
service_account_info,
scopes=SCOPES
)
return credentials
# 開発用マイドライブからのファイルを取得
def get_directory_files_dev(service,shared_folder_id, filename):
"""
対象のディレクトリ配下からファイル名で検索した結果を配列で返す
:param filename: ファイル名
:param directory_id: ディレクトリID
:param pages_max: 最大ページ探索数
:return: ファイルリスト
"""
items = []
page = 0
pages_max = 10 # 最大ページ数
while True:
page += 1
if page == pages_max:
break
results = service.files().list(
corpora="user",
includeItemsFromAllDrives=True,
includeTeamDriveItems=True,
q=f"'{shared_folder_id}' in parents and name = '{filename}' and trashed = false",
supportsAllDrives=True,
pageSize=10,
fields="nextPageToken, files(id, name)").execute()
items += results.get("files", [])
page_token = results.get('nextPageToken', None)
if page_token is None:
break
return items
# 本番用共有ドライブからのファイルを取得
def get_directory_files_prod(service,shared_folder_id,sub_folder_id,filename):
"""
対象のディレクトリ配下からファイル名で検索した結果を配列で返す
:param filename: ファイル名
:param directory_id: ディレクトリID
:param pages_max: 最大ページ探索数
:return: ファイルリスト
"""
items = []
page = 0
pages_max = 10 # 最大ページ数
while True:
page += 1
if page == pages_max:
break
results = service.files().list(
corpora="drive",
driveId=shared_folder_id,
includeItemsFromAllDrives=True,
includeTeamDriveItems=True,
q=f"'{sub_folder_id}' in parents and name = '{filename}' and trashed = false",
supportsAllDrives=True,
pageSize=10,
fields="nextPageToken, files(id, name, parents)").execute()
items += results.get("files", [])
page_token = results.get('nextPageToken', None)
if page_token is None:
break
return items
def create_new_folder(service, sub_folder_id, title):
"""
Google Drive APIを使用して新しいフォルダを作成する
:param service: Google Drive APIのサービスオブジェクト
:param title: フォルダのタイトル
:return: 作成したフォルダのID
"""
file_metadata = {
"name": title,
"parents": [sub_folder_id], # 共有ドライブのIDを指定
"mimeType": "application/vnd.google-apps.folder",
}
result = service.files().create(body=file_metadata, fields="id", supportsAllDrives=True).execute()
return result.get('id')
def create_new_spreadsheet(service,folder_id,title):
"""
Google Sheets APIを使用して新しいスプレッドシートを作成する
:param service: Google Sheets APIのサービスオブジェクト
:param title: スプレッドシートのタイトル
:return: 作成したスプレッドシートのID
"""
file_metadata = {
'name': title,
'parents': [folder_id], # 作成したフォルダのIDを指定
'mimeType': 'application/vnd.google-apps.spreadsheet',
}
result = (
service.files()
.create(body=file_metadata, fields="id", supportsAllDrives=True)
.execute()
)
return result.get("id")
def append_log_to_sheet(service, spreadsheet_id, row_data):
"""
Google Sheets APIを使用してスプレッドシートにログを追加する
:param service: Google Sheets APIのサービスオブジェクト
:param spreadsheet_id: スプレッドシートのID
:param row_data: 追加するログデータリスト形式
"""
body = {
'values': [row_data]
}
# スプレッドシートにログを追加
result = service.spreadsheets().values().append(
spreadsheetId=spreadsheet_id,
range='Sheet1',
valueInputOption="USER_ENTERED",
insertDataOption='INSERT_ROWS',
body=body,
).execute()
print(f"{result.get('updates').get('updatedCells')} cells appended.")
def convert_to_jst_ymd(starts_at):
"""
開始日時をYYYY年MM月DD日形式に変換する
:param starts_at: 開始日時の文字列
:return: YYYY年MM月DD日形式の文字列
"""
# 開始日時をUTCからJSTに変換
dt = datetime.fromisoformat(starts_at.replace("Z", "+00:00")).astimezone(timezone(timedelta(hours=9)))
# YYYY年MM月DD日形式に変換
return dt.strftime("%Y年%m月%d日")

View file

@ -1,5 +0,0 @@
functions-framework==3.*
google-cloud-secret-manager
google-api-python-client
google-auth-httplib2
google-auth-oauthlib

View file

@ -1,5 +0,0 @@
PROJECT_ID=datacom-poc
LOCATION=asia-northeast1
BUCKET=meeting-report-data
KEY_PATH=projects/32472615575/secrets/mrt-hubspot-accesstoken
MODE=dev

View file

@ -1,5 +0,0 @@
PROJECT_ID: datacom-poc
LOCATION: asia-northeast1
BUCKET: meeting-report-data
KEY_PATH: projects/32472615575/secrets/mrt-hubspot-accesstoken
MODE: dev

View file

@ -1,5 +0,0 @@
PROJECT_ID: rational-timing-443808-u0
LOCATION: asia-northeast1
BUCKET: meeting-data
KEY_PATH: projects/570987459910/secrets/mrt-hubspot-accesstoken
MODE: prod

View file

@ -1,33 +0,0 @@
#!/bin/bash
# プロジェクトIDを設定
PROJECT_ID="datacom-poc"
# デプロイする関数名
FUNCTION_NAME="mrt-create-hubspot-meeting-log"
# 関数のエントリポイント
ENTRY_POINT="handle_request"
# ランタイム
RUNTIME="python312"
# リージョン
REGION="asia-northeast1"
# 環境変数ファイル
ENV_VARS_FILE=".env_dev"
gcloud auth application-default set-quota-project $PROJECT_ID
gcloud config set project $PROJECT_ID
# デプロイコマンド
gcloud functions deploy $FUNCTION_NAME \
--gen2 \
--region $REGION \
--runtime $RUNTIME \
--source=./source \
--trigger-http \
--no-allow-unauthenticated \
--entry-point $ENTRY_POINT \
--env-vars-file $ENV_VARS_FILE

View file

@ -1,200 +0,0 @@
import functions_framework
from google.cloud import storage, secretmanager
import os
import hubspot
from hubspot.crm.objects.meetings import SimplePublicObjectInputForCreate, ApiException
import requests
import csv
import io
import re
import jaconv
from rapidfuzz import process, fuzz
import json
CUTOFF = 80 # Fuzzy 閾値 (0-100)
LEGAL_SUFFIX = r'(株式会社|(株)|\(株\)|有限会社|合同会社|Inc\.?|Corp\.?|Co\.?Ltd\.?)'
cs_client = storage.Client(project=os.getenv("PROJECT_ID"))
sm_client = secretmanager.SecretManagerServiceClient()
@functions_framework.http
def handle_request(request):
try:
request_json = request.get_json()
print(request_json)
mode = os.getenv("MODE") # モードdevまたはprod
title = request_json['title']
host_id = request_json['host_id'] if mode == 'prod' else 'ksuenaga@datacom.jp' # ホストユーザーID(開発環境では固定値を使用)
starts_at = request_json['starts_at']
ends_at = request_json['ends_at']
minutes = request_json['minutes']
# タイトルから【】を削除
title = title.replace("【", "").replace("】", "")
# タイトルから企業名を抽出
company_name = title.split("様")[0].strip() # "様" で分割して企業名を取得
print("抽出した企業名:", company_name)
# 会社名から会社IDを取得
matched_company_id, matched_company_name = search_company(company_name)
# マッチしたときだけ処理を行う
if matched_company_id:
# ユーザーIDを取得
by_email = load_owners()
user_id = None
if host_id in by_email:
user_id = by_email[host_id]['id']
print("取得したユーザーID:", user_id)
# 改行コードを <br> タグに変換
minutes_html = minutes.replace("\n", "<br>")
# ミーティングログを作成
create_meeting_log(matched_company_id, title, user_id, starts_at, ends_at, minutes_html)
response_data = {
"matched_company_id": matched_company_id, # マッチした会社ID
"matched_company_name": matched_company_name, # マッチした会社名
}
return (json.dumps(response_data, ensure_ascii=False), 200, {"Content-Type": "application/json"})
except ApiException as e:
print("Exception when calling basic_api->create: %s\n" % e)
def normalize(name: str) -> str:
"""表記ゆれ吸収用の正規化"""
n = jaconv.z2h(name, kana=False, digit=True, ascii=True).lower()
n = re.sub(LEGAL_SUFFIX, '', n)
return re.sub(r'[\s\-・・,,、\.]', '', n)
# GCSから会社一覧取得
def load_componies():
"""
毎回 Cloud Storage から CSV を読み込む
*応答速度を気にしない* 前提なのでキャッシュしなくても OK
"""
blob = cs_client.bucket(os.getenv("BUCKET")).blob('master/mst_company.csv')
raw = blob.download_as_bytes() # bytes
recs, by_norm = [], {}
with io.StringIO(raw.decode("utf-8")) as f:
reader = csv.DictReader(f)
for row in reader:
row["norm_name"] = normalize(row["company_name"])
recs.append(row)
by_norm[row["norm_name"]] = row # 完全一致用ハッシュ
return recs, by_norm # (list[dict], dict)
# GCSから担当者一覧取得
def load_owners():
"""
GCS から担当者一覧 CSV を読み込み
email -> row 辞書 のマッピングを返す
"""
blob = cs_client.bucket(os.getenv("BUCKET")).blob('master/mst_owner.csv')
raw = blob.download_as_bytes() # bytes
by_email = {}
with io.StringIO(raw.decode("utf-8")) as f:
reader = csv.DictReader(f)
for row in reader:
# row に "email" と "user_id" フィールドがある前提
email = row["email"].strip().lower()
by_email[email] = row
return by_email
def fuzzy_candidates(norm: str, recs):
"""
norm : 正規化済み検索語
recs : 会社レコード list[dict] (norm_name 含む)
戻り値 : list[(score:int, idx:int)]
"""
top = 2 # 上位 2 件を取得
matches = process.extract(
norm,
[r["norm_name"] for r in recs],
scorer=fuzz.WRatio,
score_cutoff=CUTOFF,
limit=top
)
print("ファジーマッチ結果:", matches)
if len(matches) == 0:
return None # マッチなしの場合は None を返す
elif len(matches) == 1:
return recs[matches[0][2]] # 上位 1 件のみの場合はそのレコードを返す
else:
if(matches[0][1] == matches[1][1]):
return None # 上位 2 件のスコアが同じ場合は None を返す
return recs[matches[0][2]] # 上位 1 件のみの場合はそのレコードを返す
def search_company(company_name):
# -------------------- マスタ読み込み --------------------
recs, by_norm = load_componies()
norm_company_name = normalize(company_name)
print("正規化した企業名:", norm_company_name)
matched_company_id = None
matched_company_name = None
# -------------------- 完全一致 --------------------
if norm_company_name in by_norm:
matched_company_id = by_norm[norm_company_name]["company_id"]
matched_company_name = by_norm[norm_company_name]["company_name"]
# -------------------- ファジーマッチ複数 --------------------
else :
result = fuzzy_candidates(norm_company_name, recs)
if result:
matched_company_id = result["company_id"]
matched_company_name = result["company_name"]
print("マッチした会社ID:", matched_company_id)
print("マッチした会社名:", matched_company_name)
return matched_company_id, matched_company_name
def create_meeting_log(company_id ,title, user_id, starts_at, ends_at, minutes):
"""
HubSpot API を使ってミーティングログを作成する
"""
access_key = get_access_key() # Secret Manager からアクセストークンを取得
hs_client = hubspot.Client.create(access_token=access_key)
properties = {
"hs_timestamp": starts_at,
"hs_meeting_title": title,
"hubspot_owner_id": user_id,
"hs_meeting_body": minutes,
"hs_meeting_start_time": starts_at,
"hs_meeting_end_time": ends_at,
}
simple_public_object_input_for_create = SimplePublicObjectInputForCreate(
associations=[{"types":[{"associationCategory":"HUBSPOT_DEFINED","associationTypeId":188}],"to":{"id":company_id}}],
properties=properties
)
api_response = hs_client.crm.objects.meetings.basic_api.create(simple_public_object_input_for_create=simple_public_object_input_for_create)
print(api_response)
#
# SecretManagerからアクセストークンを取得
#
def get_access_key():
key_path = os.getenv('KEY_PATH') + "/versions/1"
# アクセストークン取得
response = sm_client.access_secret_version(name=key_path)
# アクセストークンをデコード
access_token = response.payload.data.decode("UTF-8")
return access_token

View file

@ -1,8 +0,0 @@
functions-framework==3.*
Flask
google-cloud-storage
google-cloud-workflows
google-cloud-secret-manager
hubspot-api-client
rapidfuzz
jaconv

View file

@ -1,5 +0,0 @@
KEY_PATH=projects/32472615575/secrets/sa-access-google-drive-key
LOG_FOLDER_ID=1IZToaM9K9OJXrgV05aLO5k2ZCXpdlJzX
MEETING_FOLDER_ID=1cCDJKusfrlDrJe2yHCR8pCHJXRqX-4Hw
HUBSPOT_COMPANY_URL=https://app-na2.hubspot.com/contacts/242960467/record/0-2
MODE=dev

View file

@ -1,5 +0,0 @@
KEY_PATH: projects/32472615575/secrets/sa-access-google-drive-key
LOG_FOLDER_ID: 1IZToaM9K9OJXrgV05aLO5k2ZCXpdlJzX
MEETING_FOLDER_ID: 1cCDJKusfrlDrJe2yHCR8pCHJXRqX-4Hw
HUBSPOT_COMPANY_URL: https://app-na2.hubspot.com/contacts/242960467/record/0-2
MODE: dev

View file

@ -1,5 +0,0 @@
KEY_PATH: projects/570987459910/secrets/sa-create-minutes-key
LOG_FOLDER_ID: 1arL6AxpvA7N6Umg4wdrdAcRWBdKc-Jfb
MEETING_FOLDER_ID: 0AGT_1dSq66qYUk9PVA
HUBSPOT_COMPANY_URL: https://app.hubspot.com/contacts/22400567/record/0-2
MODE: production

View file

@ -1,33 +0,0 @@
#!/bin/bash
# プロジェクトIDを設定
PROJECT_ID="datacom-poc"
# デプロイする関数名
FUNCTION_NAME="mrt-create-log-sheet"
# 関数のエントリポイント
ENTRY_POINT="handle_request"
# ランタイム
RUNTIME="python312"
# リージョン
REGION="asia-northeast1"
# 環境変数ファイル
ENV_VARS_FILE=".env_dev"
gcloud auth application-default set-quota-project $PROJECT_ID
gcloud config set project $PROJECT_ID
# デプロイコマンド
gcloud functions deploy $FUNCTION_NAME \
--gen2 \
--region $REGION \
--runtime $RUNTIME \
--source=./source \
--trigger-http \
--no-allow-unauthenticated \
--entry-point $ENTRY_POINT \
--env-vars-file $ENV_VARS_FILE

View file

@ -1,218 +0,0 @@
import functions_framework
from google.cloud import secretmanager
from google.oauth2 import service_account
from googleapiclient.discovery import build
from googleapiclient.errors import HttpError
import json
import os
from datetime import datetime, timezone, timedelta
sm_client = secretmanager.SecretManagerServiceClient()
SCOPES = ["https://www.googleapis.com/auth/drive", "https://www.googleapis.com/auth/drive.file"]
HEADER_VALUES = ["タイムスタンプ","商談日", "タイトル", "登録先企業","担当者", "ミーティングURL", "議事録URL", "HubSpot会社概要URL"]
@functions_framework.http
def handle_request(request):
# POSTリクエストの処理
if request.method != 'POST':
return ('', 405, {'Allow': 'POST', 'Content-Type': 'application/json'}) # メソッドがPOSTでない場合は405エラーを返す
"""Shows basic usage of the Drive Activity API.
Prints information about the last 10 events that occured the user's Drive.
"""
try:
log_folder_id = os.getenv("LOG_FOLDER_ID") # 共有ドライブID
meeting_folder_id = os.getenv("MEETING_FOLDER_ID") # ミーティングフォルダID
mode = os.getenv("MODE") # モードdevまたはprod
service_account_info = get_service_account_info()
# 認証
credentials = get_credentials(service_account_info)
# APIクライアントの構築
drive_service = build("drive", "v3", credentials=credentials)
sheet_service = build("sheets", "v4", credentials=credentials)
# 現在日時をJSTに変換
jst_now = datetime.now(timezone.utc).astimezone(timezone(timedelta(hours=9)))
# JSTの現在日時を文字列に変換
ym_str = jst_now.strftime("%Y%m")
y_str = jst_now.strftime("%Y")
# 年別のフォルダを検索
target_folder = get_directory_files_dev(drive_service, log_folder_id, y_str) if mode == "dev" else get_directory_files_prod(drive_service, meeting_folder_id, log_folder_id, y_str)
print("target_folder", target_folder)
year_folder_id = None
if not target_folder:
# フォルダが存在しない場合は新規作成
year_folder_id = create_new_folder(drive_service, log_folder_id, y_str)
else:
# フォルダが存在する場合はそのIDを使用
year_folder_id = target_folder[0]['id']
print("年別のフォルダID:", year_folder_id)
# スプレッドシートを作成
spreadsheet_id = create_new_spreadsheet(drive_service, year_folder_id, ym_str)
print("スプレッドシートID:", spreadsheet_id)
# 注意事項追加
append_log_to_sheet(sheet_service, spreadsheet_id, ["※シート名変更厳禁"])
# ヘッダーを追加
append_log_to_sheet(sheet_service, spreadsheet_id, HEADER_VALUES)
return (json.dumps({"status": "success"}, ensure_ascii=False), 200, {"Content-Type": "application/json"})
except HttpError as error:
# TODO(developer) - Handleerrors from drive activity API.
print(f"An error occurred: {error}")
#
# SecretManagerから秘密鍵を取得
#
def get_service_account_info():
key_path = os.getenv('KEY_PATH') + "/versions/1"
# 秘密鍵取得
response = sm_client.access_secret_version(name=key_path)
# 秘密鍵の値をデコード
secret_key = response.payload.data.decode("UTF-8")
return json.loads(secret_key)
# Google Drive認証
def get_credentials(service_account_info):
credentials = service_account.Credentials.from_service_account_info(
service_account_info,
scopes=SCOPES
)
return credentials
# 開発用マイドライブからのファイルを取得
def get_directory_files_dev(service,shared_folder_id, filename):
"""
対象のディレクトリ配下からファイル名で検索した結果を配列で返す
:param filename: ファイル名
:param directory_id: ディレクトリID
:param pages_max: 最大ページ探索数
:return: ファイルリスト
"""
items = []
page = 0
pages_max = 10 # 最大ページ数
while True:
page += 1
if page == pages_max:
break
results = service.files().list(
corpora="user",
includeItemsFromAllDrives=True,
includeTeamDriveItems=True,
q=f"'{shared_folder_id}' in parents and name = '{filename}' and trashed = false",
supportsAllDrives=True,
pageSize=10,
fields="nextPageToken, files(id, name)").execute()
items += results.get("files", [])
page_token = results.get('nextPageToken', None)
if page_token is None:
break
return items
# 本番用共有ドライブからのファイルを取得
def get_directory_files_prod(service,shared_folder_id,sub_folder_id,filename):
"""
対象のディレクトリ配下からファイル名で検索した結果を配列で返す
:param filename: ファイル名
:param directory_id: ディレクトリID
:param pages_max: 最大ページ探索数
:return: ファイルリスト
"""
items = []
page = 0
pages_max = 10 # 最大ページ数
while True:
page += 1
if page == pages_max:
break
results = service.files().list(
corpora="drive",
driveId=shared_folder_id,
includeItemsFromAllDrives=True,
includeTeamDriveItems=True,
q=f"'{sub_folder_id}' in parents and name = '{filename}' and trashed = false",
supportsAllDrives=True,
pageSize=10,
fields="nextPageToken, files(id, name, parents)").execute()
items += results.get("files", [])
page_token = results.get('nextPageToken', None)
if page_token is None:
break
return items
def create_new_folder(service, sub_folder_id, title):
"""
Google Drive APIを使用して新しいフォルダを作成する
:param service: Google Drive APIのサービスオブジェクト
:param title: フォルダのタイトル
:return: 作成したフォルダのID
"""
file_metadata = {
"name": title,
"parents": [sub_folder_id], # 共有ドライブのIDを指定
"mimeType": "application/vnd.google-apps.folder",
}
result = service.files().create(body=file_metadata, fields="id", supportsAllDrives=True).execute()
return result.get('id')
def create_new_spreadsheet(service,folder_id,title):
"""
Google Sheets APIを使用して新しいスプレッドシートを作成する
:param service: Google Sheets APIのサービスオブジェクト
:param title: スプレッドシートのタイトル
:return: 作成したスプレッドシートのID
"""
file_metadata = {
'name': title,
'parents': [folder_id], # 作成したフォルダのIDを指定
'mimeType': 'application/vnd.google-apps.spreadsheet',
}
result = (
service.files()
.create(body=file_metadata, fields="id", supportsAllDrives=True)
.execute()
)
return result.get("id")
def append_log_to_sheet(service, spreadsheet_id, row_data):
"""
Google Sheets APIを使用してスプレッドシートにログを追加する
:param service: Google Sheets APIのサービスオブジェクト
:param spreadsheet_id: スプレッドシートのID
:param row_data: 追加するログデータリスト形式
"""
body = {
'values': [row_data]
}
# スプレッドシートにログを追加
result = service.spreadsheets().values().append(
spreadsheetId=spreadsheet_id,
range='Sheet1',
valueInputOption="USER_ENTERED",
insertDataOption='INSERT_ROWS',
body=body,
).execute()
print(f"{result.get('updates').get('updatedCells')} cells appended.")

View file

@ -1,5 +0,0 @@
functions-framework==3.*
google-cloud-secret-manager
google-api-python-client
google-auth-httplib2
google-auth-oauthlib

View file

@ -1,5 +0,0 @@
PROJECT_ID=datacom-poc
LOCATION=asia-northeast1
BUCKET=meeting-report-data
OBJECT=master/mst_company.csv
KEY_PATH=projects/32472615575/secrets/mrt-hubspot-accesstoken

View file

@ -1,5 +0,0 @@
PROJECT_ID: datacom-poc
LOCATION: asia-northeast1
BUCKET: meeting-report-data
OBJECT: master/mst_company.csv
KEY_PATH: projects/32472615575/secrets/mrt-hubspot-accesstoken

View file

@ -1,5 +0,0 @@
PROJECT_ID: rational-timing-443808-u0
LOCATION: asia-northeast1
BUCKET: meeting-data
OBJECT: master/mst_company.csv
KEY_PATH: projects/570987459910/secrets/mrt-hubspot-accesstoken

View file

@ -1,87 +0,0 @@
import functions_framework
from google.cloud import storage, secretmanager
import os
import hubspot
from hubspot.crm.objects.meetings import ApiException
import csv
import io
import json
cs_client = storage.Client()
sm_client = secretmanager.SecretManagerServiceClient()
@functions_framework.http
def handle_request(request):
try:
# 会社一覧取得
companies = fetch_all_companies()
# メモリ上で CSV を生成
csv_buffer = io.StringIO()
writer = csv.writer(csv_buffer)
# ヘッダー行
writer.writerow(["company_id", "company_name"])
# 各行を書き込み
for row in companies:
company_id = row['properties']['hs_object_id']
company_name = row['properties']['name']
writer.writerow([company_id, company_name])
# Cloud Storage にアップロード
upload_to_gcs(csv_buffer)
return 'success', 200
except ApiException as e:
print("Exception when calling basic_api->create: %s\n" % e)
return (json.dumps("", ensure_ascii=False), 500, {"Content-Type": "application/json"})
def fetch_all_companies():
"""
Companies API get_page をページネーション付きで呼び出し
全オブジェクトをリストで返す
"""
access_key = get_access_key() # Secret Manager からアクセストークンを取得
hs_client = hubspot.Client.create(access_token=access_key)
all_companies = []
after = None
limit = 100 # 1 回あたりの取得件数(最大 100
while True:
# get_page の基本呼び出し
response = hs_client.crm.companies.basic_api.get_page(
limit=limit,
archived=False,
after=after
)
# レスポンスから companies の配列を追加
if response.results:
all_companies.extend([c.to_dict() for c in response.results])
# 次ページがない場合はループ終了
paging = response.paging
if not paging or not paging.next or not paging.next.after:
break
# next.after をセットして次ループへ
after = paging.next.after
return all_companies
def upload_to_gcs(data):
"""
メモリ上の CSV データを Cloud Storage にアップロード
"""
bucket = cs_client.bucket(os.getenv("BUCKET"))
blob = bucket.blob(os.getenv("OBJECT"))
blob.upload_from_string(data.getvalue(), content_type='text/csv')
#
# SecretManagerからアクセストークンを取得
#
def get_access_key():
key_path = os.getenv('KEY_PATH') + "/versions/1"
# アクセストークン取得
response = sm_client.access_secret_version(name=key_path)
# アクセストークンをデコード
access_token = response.payload.data.decode("UTF-8")
return access_token

View file

@ -1,5 +0,0 @@
functions-framework==3.*
Flask
google-cloud-storage
google-cloud-secret-manager
hubspot-api-client

View file

@ -1,5 +0,0 @@
PROJECT_ID=datacom-poc
LOCATION=asia-northeast1
BUCKET=meeting-report-data
OBJECT=master/mst_owner.csv
KEY_PATH=projects/32472615575/secrets/mrt-hubspot-accesstoken

View file

@ -1,5 +0,0 @@
PROJECT_ID: datacom-poc
LOCATION: asia-northeast1
BUCKET: meeting-report-data
OBJECT: master/mst_owner.csv
KEY_PATH: projects/32472615575/secrets/mrt-hubspot-accesstoken

View file

@ -1,5 +0,0 @@
PROJECT_ID: rational-timing-443808-u0
LOCATION: asia-northeast1
BUCKET: meeting-data
OBJECT: master/mst_owner.csv
KEY_PATH: projects/570987459910/secrets/mrt-hubspot-accesstoken

View file

@ -1,90 +0,0 @@
import functions_framework
from google.cloud import storage, secretmanager
import os
import hubspot
from hubspot.crm.objects.meetings import ApiException
import csv
import io
import json
cs_client = storage.Client()
sm_client = secretmanager.SecretManagerServiceClient()
@functions_framework.http
def handle_request(request):
try:
# 会社一覧取得
owners = fetch_all_owners()
# メモリ上で CSV を生成
csv_buffer = io.StringIO()
writer = csv.writer(csv_buffer)
# ヘッダー行
writer.writerow(["id", "email"])
# 各行を書き込み
for row in owners:
user_id = row['id']
email = row['email']
writer.writerow([user_id, email])
# Cloud Storage にアップロード
upload_to_gcs(csv_buffer)
return (json.dumps('', ensure_ascii=False), 200, {"Content-Type": "application/json"})
except ApiException as e:
print("Exception when calling basic_api->create: %s\n" % e)
return (json.dumps("", ensure_ascii=False), 200, {"Content-Type": "application/json"})
def fetch_all_owners():
"""
Companies API get_page をページネーション付きで呼び出し
全オブジェクトをリストで返す
"""
access_key = get_access_key() # Secret Manager からアクセストークンを取得
hs_client = hubspot.Client.create(access_token=access_key)
all_owners = []
after = None
limit = 100 # 1 回あたりの取得件数(最大 100
while True:
# get_page の基本呼び出し
response = hs_client.crm.owners.owners_api.get_page(
limit=limit,
archived=False,
after=after
)
# レスポンスから companies の配列を追加
if response.results:
all_owners.extend([c.to_dict() for c in response.results])
# 次ページがない場合はループ終了
paging = response.paging
if not paging or not paging.next or not paging.next.after:
break
# next.after をセットして次ループへ
after = paging.next.after
return all_owners
def upload_to_gcs(data):
"""
メモリ上の CSV データを Cloud Storage にアップロード
"""
bucket = cs_client.bucket(os.getenv("BUCKET"))
blob = bucket.blob(os.getenv("OBJECT"))
blob.upload_from_string(data.getvalue(), content_type='text/csv')
#
# SecretManagerからアクセストークンを取得
#
def get_access_key():
key_path = os.getenv('KEY_PATH') + "/versions/1"
# アクセストークン取得
response = sm_client.access_secret_version(name=key_path)
# アクセストークンをデコード
access_token = response.payload.data.decode("UTF-8")
return access_token

View file

@ -1,5 +0,0 @@
functions-framework==3.*
Flask
google-cloud-storage
google-cloud-secret-manager
hubspot-api-client

View file

@ -1,3 +0,0 @@
MIITEL_URL=https://datacom.miitel.jp/
PROJECT_ID=datacom-poc
MODEL_ID=gemini-2.5-flash

View file

@ -1,3 +0,0 @@
MIITEL_URL: https://datacom.miitel.jp/
PROJECT_ID: datacom-poc
MODEL_ID: gemini-2.5-flash

View file

@ -1,3 +0,0 @@
MIITEL_URL: https://datacom.miitel.jp/
PROJECT_ID: rational-timing-443808-u0
MODEL_ID: gemini-2.5-flash

View file

@ -1,35 +0,0 @@
#!/bin/bash
# プロジェクトIDを設定
PROJECT_ID="datacom-poc"
# デプロイする関数名
FUNCTION_NAME="mrt-generate-meeting-minutes"
# 関数のエントリポイント
ENTRY_POINT="handle_request"
# ランタイム
RUNTIME="python312"
# リージョン
REGION="asia-northeast1"
# 環境変数ファイル
ENV_VARS_FILE=".env_dev"
gcloud auth application-default set-quota-project $PROJECT_ID
gcloud config set project $PROJECT_ID
# デプロイコマンド
gcloud functions deploy $FUNCTION_NAME \
--gen2 \
--region $REGION \
--runtime $RUNTIME \
--source=./source \
--trigger-http \
--cpu=0.5 \
--memory=1Gi \
--no-allow-unauthenticated \
--entry-point $ENTRY_POINT \
--env-vars-file $ENV_VARS_FILE

View file

@ -1,132 +0,0 @@
import functions_framework
import vertexai
from vertexai.generative_models import GenerativeModel, ChatSession
from google.cloud import storage
from google.cloud import secretmanager
import json
import requests
import os
from datetime import datetime, timezone, timedelta
import gzip
# Storage クライアントを作成
storage_client = storage.Client()
sm_client = secretmanager.SecretManagerServiceClient()
@functions_framework.http
def handle_request(request):
# POSTリクエストの処理
if request.method != 'POST':
return ({'error': 'Method not allowed'}, 405, {'Content-Type': 'application/json'})
try:
request_json = request.get_json()
print(request_json)
project_id = os.getenv("PROJECT_ID")
miitel_url = os.getenv("MIITEL_URL")
video_info = request_json["video"]
access_permission = video_info["access_permission"]
video_id = video_info["id"] # 会議履歴ID
host_name = video_info["host"]["user_name"] # ホストユーザー名
host_id = video_info["host"]["login_id"] # ホストユーザーID
starts_at = video_info["starts_at"] # 開始日時
ends_at = video_info["ends_at"] # 終了日時
video_url = miitel_url + "app/video/" + video_id # 会議履歴URL
title = video_info["title"] # 会議タイトル
print("会議タイトル",title)
# 閲覧制限のない会議のみ生成
if access_permission != "EVERYONE":
return (json.dumps({"status": "end"}, ensure_ascii=False), 200, {"Content-Type": "application/json"})
# 社外ミーティングのみ議事録作成
if "" not in title or "社内" in title:
return (json.dumps({"status": "end"}, ensure_ascii=False), 200, {"Content-Type": "application/json"})
# 議事録ファイル名
jst_date_str = generate_jst_date(starts_at) # 開始日時をJSTに変換
file_name = f"{jst_date_str} {title} {host_name}"
print(file_name)
# 議事録作成
speech_recognition = video_info["speech_recognition"]["raw"] # 文字起こしデータ
minutes_text = create_minutes(project_id,speech_recognition)
print("議事録作成完了")
# テキスト内容をセット
minutes = f"会議履歴URL{video_url}\n"
minutes += f"担当者:{host_name}\n\n"
minutes += minutes_text
response_data = {
"status": "next", # ステータス
"title": title, # 会議タイトル
"host_id": host_id, # ホストユーザーID
"host_name": host_name, # ホストユーザー名
"video_url": video_url, # 会議履歴URL
"starts_at": starts_at, # 開始日時
"ends_at": ends_at, # 終了日時
"file_name": file_name, # 議事録ファイル名
"minutes": minutes, # 議事録内容
}
return (json.dumps(response_data, ensure_ascii=False), 200, {"Content-Type": "application/json"})
except Exception as e:
# エラー
error_response = {
"error": str(e) #エラー内容
}
print(str(e))
return json.dumps(error_response), 500, {'Content-Type': 'application/json'} #エラー
def generate_jst_date(starts_at):
# UTCの文字列をdatetimeオブジェクトに変換
utc_datetime = datetime.fromisoformat(starts_at)
# JSTへの変換
jst_timezone = timezone(timedelta(hours=9)) # JSTはUTC+9
jst_datetime = utc_datetime.astimezone(jst_timezone)
# yyyy-MM-dd形式にフォーマット
jst_date_str = jst_datetime.strftime("%Y年%m月%d日")
return jst_date_str
def create_minutes(project_id,speech_recognition):
location = "us-central1"
model_id = os.getenv("MODEL_ID")
# print("モデルID:", model_id)
vertexai.init(project=project_id, location=location)
model = GenerativeModel(model_id)
# print("モデル初期化完了")
prompt = f"""
あなたは議事録作成のプロフェッショナルです以下の文字起こし結果は営業マンが録音した商談の文字起こしです以下の制約条件に従い最高の商談報告の議事録を作成してください
制約条件:
1. 文字起こし結果にはAIによる書き起こしミスがある可能性を考慮してください
2. 冒頭に主要な決定事項アクションアイテムをまとめてください
3. 議論のポイントを議題ごとに要約してください
4. 見出しや箇条書きを用いて情報が探しやすい構造で簡潔かつ明瞭に記述してください
5. 要約は500文字以内に収めてください
6. 箇条書き形式で簡潔にまとめてください
7. マークダウン記法は使わず各項目を等を使って見やすくしてください
文字起こし結果
{speech_recognition}
"""
# print("-------------プロンプト-------------")
# print(prompt[:1000])
# print("-------------議事録作成-------------")
response = model.generate_content(prompt)
# print(response.text)
return response.text

View file

@ -1,5 +0,0 @@
functions-framework==3.*
google-cloud-storage
google-cloud-aiplatform
google-cloud-secret-manager
pydrive2

BIN
functions/generate_minutes/.DS_Store vendored Normal file

Binary file not shown.

View file

@ -0,0 +1,27 @@
# This file specifies files that are *not* uploaded to Google Cloud
# using gcloud. It follows the same syntax as .gitignore, with the addition of
# "#!include" directives (which insert the entries of the given .gitignore-style
# file at that point).
#
# For more information, run:
# $ gcloud topic gcloudignore
#
.gcloudignore
# If you would like to upload your .git directory, .gitignore file or files
# from your .gitignore file, remove the corresponding line
# below:
.git
.gitignore
node_modules
.env_dev
.env_prod
deploy_function_dev.sh
deploy_function_prod.sh
files/
package-lock.json

View file

@ -4,13 +4,13 @@
PROJECT_ID="datacom-poc"
# Function name to deploy
FUNCTION_NAME="mrt-create-minutes"
FUNCTION_NAME="generate_minutes"
# Function entry point
ENTRY_POINT="handle_request"
ENTRY_POINT="helloHttp"
# Runtime
RUNTIME="python312"
RUNTIME="nodejs22"
# Region
REGION="asia-northeast1"
@ -21,13 +21,12 @@ ENV_VARS_FILE=".env_dev"
gcloud auth application-default set-quota-project $PROJECT_ID
gcloud config set project $PROJECT_ID
# Deploy command
gcloud functions deploy $FUNCTION_NAME \
--gen2 \
--region $REGION \
--runtime $RUNTIME \
--source=./source \
--trigger-http \
--no-allow-unauthenticated \
--source=. \
--entry-point $ENTRY_POINT \
--env-vars-file $ENV_VARS_FILE
--env-vars-file $ENV_VARS_FILE \
--trigger-http \
--timeout 600s \

View file

@ -0,0 +1,16 @@
// src/index.ts
import express from "express";
import type { Express } from "express";
import router from "./src/apiRouter";
const app: Express = express();
app.use("/api", router);
export const helloHttp = app;
// export const helloHttp = (req: Request, res: Response): void => {
// // console.log("Function invoked:", new Date().toISOString());
// console.log("path:", req.path, "method:", req.method);
// const name = (req.query.name as string) ?? "World";
// res.status(200).send(`Hello, ${name} from TypeScript Cloud Functions!`);
// };
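The Functions Framework accepts an Express app as an HTTP target, so exporting the app as helloHttp is enough for the deploy script above (--entry-point helloHttp); everything under /api is dispatched by the router. A sketch of the equivalent explicit registration, shown only for illustration and not part of this change:

// Sketch only: equivalent explicit registration via the Functions Framework API.
// The deployed code simply exports the Express app as `helloHttp` (see above).
import { http } from "@google-cloud/functions-framework";
import express from "express";
import type { Express } from "express";
import router from "./src/apiRouter";

const app: Express = express();
app.use("/api", router); // e.g. POST /api/miitel, /api/dailyBatch, /api/getLog, /api/reExecute

// Registers the app under the target name used by `--entry-point helloHttp`.
http("helloHttp", app);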

View file

@ -0,0 +1,35 @@
{
"name": "generate_minutes",
"version": "1.0.0",
"main": "dist/index.js",
"scripts": {
"build": "tsc",
"start": "npm run build && functions-framework --target=helloHttp --port=8080 --source=dist/index.js",
"debug": "dotenv -e .env_prod -- node --inspect node_modules/.bin/functions-framework --source=dist/index.js --target=helloHttp",
"watch": "concurrently \"dotenv -e .env_prod -- npm run build -- --watch\" \"dotenv -e .env_prod -- nodemon --watch ./dist/ --exec npm run debug\""
},
"devDependencies": {
"@google-cloud/functions-framework": "^3.0.0",
"@types/archiver": "^7.0.0",
"@types/express": "^4.17.0",
"@types/node": "^20.0.0",
"dotenv-cli": "^11.0.0",
"nodemon": "^3.1.11",
"ts-node": "^10.9.2",
"typescript": "^5.0.0"
},
"dependencies": {
"@google-cloud/storage": "^7.17.3",
"@google/genai": "^1.30.0",
"@hubspot/api-client": "^13.4.0",
"archiver": "^7.0.1",
"cerceis-lib": "^2.5.0",
"concurrently": "^9.2.1",
"dotenv": "^17.2.3",
"express": "^4.21.2",
"fast-fuzzy": "^1.12.0",
"googleapis": "^105.0.0",
"marked": "^17.0.1",
"zod": "^4.1.13"
}
}

View file

@ -0,0 +1,24 @@
export const GEMINI_MODEL_ID = "gemini-2.5-pro";
export const DEBUG = false;
export const CLOUD_STORAGE_MASTER_FOLDER_NAME = "master";
export const CLOUD_STORAGE_LOG_FOLDER_NAME = "new_request_log";
export const COMPANIES_FILE_NAME = "companies.json";
export const OWNERS_FILE_NAME = "owners.json";
export const LEGAL_SUFFIX = /(株式会社|（株）|\(株\)|有限会社|合同会社|Inc\.?|Corp\.?|Co\.?Ltd\.?)/;
export const Y_FORMAT = 'yyyy';
export const YM_FORMAT = 'yyyyMM';
export const DATETIME_FORMAT = 'yyyy-MM-dd hh:mm:ss';
export const DATE_FORMAT = 'yyyy年MM月dd日';
export const FOLDER_MIMETYPE = 'application/vnd.google-apps.folder';
export const DOCUMENT_MIMETYPE = 'application/vnd.google-apps.document';
export const SHEET_MIMETYPE = 'application/vnd.google-apps.spreadsheet';
export const LOG_SHEET_HEADER_VALUES = ["タイムスタンプ","商談日", "タイトル", "登録先企業","担当者", "ミーティングURL", "議事録URL", "HubSpot会社概要URL"];
export const MAX_RETRY_COUNT = 3;
export const ROOP_DELAY_MS = 5000;

View file

@ -0,0 +1,174 @@
import express from "express";
import zlib from "zlib";
import { storageController } from "./logics/storage";
import { logUploadProcess, MiiTelWebhookSchema, processRequest, testProcess } from "./logics/process";
import { hubspotController } from "./logics/hubspot";
import { createCustomError, responseError } from "./logics/error";
import { CLOUD_STORAGE_LOG_FOLDER_NAME, CLOUD_STORAGE_MASTER_FOLDER_NAME, COMPANIES_FILE_NAME, OWNERS_FILE_NAME } from "../serverConfig";
import { Delay } from "cerceis-lib";
import path from "path";
import fs from "fs";
import { fuzzyMatchController } from "./logics/fuzzyMatch";
const router = express.Router();
// Process Request From Miitel Webhook
router.post("/miitel", async (req, res) => {
try {
const body = req.body;
if('challenge' in body) return res.status(200).contentType('text/plain').send(body.challenge);
const parsedBody = MiiTelWebhookSchema.safeParse(body);
if (!parsedBody.success) throw createCustomError("ZOD_FAILED");
const videoInfo = parsedBody.data.video;
const gzipped = zlib.gzipSync(JSON.stringify(body));
await storageController.saveToGCS(CLOUD_STORAGE_LOG_FOLDER_NAME, `${videoInfo.id}.json.gz`, gzipped, 'application/gzip');
await processRequest(videoInfo);
// if(!result) throw
return res.status(200).send("ok");
} catch(err) {
return responseError(err, res);
}
});
// Refresh Master Data Everyday
router.post("/dailyBatch", async (req, res) => {
try {
console.log("Starting daily batch process...");
// export companies to GCS
const companies = await hubspotController.getCompanies();
if(!companies) throw createCustomError("GET_COMPANIES_FAILED");
await storageController.saveToGCS(CLOUD_STORAGE_MASTER_FOLDER_NAME, COMPANIES_FILE_NAME, JSON.stringify(companies), 'application/json');
// export owners to GCS
const owners = await hubspotController.getOwners();
if(!owners) throw createCustomError("GET_OWNERS_FAILED");
await storageController.saveToGCS(CLOUD_STORAGE_MASTER_FOLDER_NAME, OWNERS_FILE_NAME, JSON.stringify(owners), 'application/json');
res.status(200).send("Daily batch executed.");
} catch (error) {
console.error("Error in daily batch:", error);
return res.status(400).send("Error executing daily batch.");
}
});
// Check Log By Meeting ID
router.post("/getLog", async (req, res) => {
try {
console.log(req.body);
const meetingId = req.body.meetingId;
const exist = await storageController.existsInGCS(CLOUD_STORAGE_LOG_FOLDER_NAME, `${meetingId}.json.gz`);
console.log("Log exists:", exist);
const log = await storageController.loadFromGCS(CLOUD_STORAGE_LOG_FOLDER_NAME, `${meetingId}.json.gz`);
if(!log) throw Error("Log not found");
const params = MiiTelWebhookSchema.parse(JSON.parse(log));
// console.log(params)
res.send(params);
} catch(error) {
console.log(error);
res.status(400).send("Failed");
}
});
// Re-execute Process By Meeting ID
router.post("/reExecute", async (req, res) => {
try {
console.log(req.body);
const meetingId = req.body.meetingId;
const newTitle = req.body.newTitle;
const log = await storageController.loadFromGCS(CLOUD_STORAGE_LOG_FOLDER_NAME, `${meetingId}.json.gz`);
if(!log) throw Error();
const params = MiiTelWebhookSchema.safeParse(JSON.parse(log));
console.log(params);
if(!params.success) throw createCustomError("ZOD_FAILED");
params.data.video.title = newTitle;
// console.log(params.data.video)
// await processRequest(params.data.video);
res.send(log);
} catch(error) {
console.log(error);
res.status(400).send("Failed");
}
});
// 過去のログを全てGoogle Driveへアップロード
// router.post("/logUpload", async (req, res) => {
// try {
// const list = await storageController.getFileList();
// if(!list) throw createCustomError("GET_FILES_FAILED");
// console.log("Total files to process:", list.length);
// const failedFiles: string[] = [];
// let count = 0;
// const tmplist = list.slice(1600,1800);
// for(const l of tmplist){
// console.log(l);
// count++;
// console.log(`Processing file ${count} of ${tmplist.length}`);
// const fileName = l.split('/')[1]
// const log = await storageController.loadFromGCS('request_log', fileName);
// if(!log) {
// failedFiles.push(fileName);
// continue;
// };
// const parsedLog = MiiTelWebhookSchema.safeParse(JSON.parse(log));
// if(!parsedLog.success) throw createCustomError("ZOD_FAILED");
// console.log(parsedLog.data.video.title);
// const result = await logUploadProcess(parsedLog.data.video);
// if(!result) failedFiles.push(fileName);
// await Delay(500);
// }
// const outputPath = path.join(__dirname, "../log/", 'failedFiles.json');
// fs.writeFileSync(outputPath, JSON.stringify(failedFiles, null, 2));
// res.send('ok');
// } catch(error) {
// console.log(error);
// res.status(400).send("Failed");
// }
// });
// router.post("/deleteFile", async (req, res) => {
// console.log(req.body);
// const fileId = req.body.fileId;
// const googleAuth = await googleDriveController.getAuth();
// const driveClilent = googleDriveController.getDriveClient(googleAuth);
// await googleDriveController.deleteFile(driveClilent, fileId);
// res.send('ok');
// });
router.post("/test", async (req, res) => {
try {
await testProcess();
res.send("ok");
} catch (error) {
console.error(error);
res.status(400).send("Error in /test endpoint");
}
});
router.post("/alertTest", async (_req, res) => {
res.status(500).send("Error");
});
// router.post("/debug", async (req, res) => {
// try {
// const a = await fuzzyMatchController.searchMatchedCompany("Aコープ九");
// console.log(a);
// res.send("ok");
// } catch (error) {
// console.error(error);
// res.status(400).send("Error in /test endpoint");
// }
// });
export default router;
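For reference, a minimal way to exercise the /miitel route during local development, assuming the function is served with npm run start (functions-framework on port 8080). The payload below is made up and only needs to satisfy MiiTelWebhookSchema; without real GCP/HubSpot credentials the downstream steps will still fail, but the routing and schema validation can be checked this way.

// scratch script, e.g. src/smokeTest.ts (hypothetical file, not part of this change)
const samplePayload = {
  video: {
    id: "sample-video-id",
    title: "【サンプル株式会社様】お打ち合わせ",
    starts_at: "2025-12-05T05:00:00Z",
    ends_at: "2025-12-05T06:00:00Z",
    access_permission: "EVERYONE",
    host: { login_id: "host@example.com", user_name: "山田太郎" },
    speech_recognition: { raw: "こんにちは。本日はよろしくお願いします。" },
  },
};

const main = async () => {
  // Node 18+ ships a global fetch, so no extra dependency is needed.
  const res = await fetch("http://localhost:8080/api/miitel", {
    method: "POST",
    headers: { "Content-Type": "application/json" },
    body: JSON.stringify(samplePayload),
  });
  console.log(res.status, await res.text());
};

main().catch(console.error);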

View file

@ -0,0 +1,110 @@
import { GoogleGenAI } from "@google/genai";
const aiClient = new GoogleGenAI({
apiKey: process.env.GEMINI_API_KEY,
});
export const aiController = {
generateMinutes: async(text: string): Promise<string | null> => {
const prompt = `
[指示]
※プロンプト本文は文字化けのため大部分が判読不能。判読できた構成のみを以下に残す。
# 前提情報商談の背景DX関連、出席者Speaker A, B 等、POSメーカー等とのAPI連携
# 商材・用語一覧表ID-POS、ArmBox、RV、MS-View、AWS、CustomerJournal (CJ)、Tiramisu、TerraMap、d@Journal、d3、D-PLAN、PV、FreshO2、Point View、Retail View、RFP、DWH、POS、CUBIC、NOCC 等
# 作成手順1〜4の番号付き指示。Tiramisu 等の商材名の表記に関する指示を含む
# 出力フォーマット要約300文字以内、決定事項、ToDo表、議題別詳細議題1、議題2、…、ネクストアクション
---
[文字起こし]:
${text}
`;
try {
const response = await aiClient.models.generateContent({
model: process.env.GEMINI_MODEL_ID || "gemini-2.5-flash",
contents: prompt,
})
if(!response.text) return null;
console.log("AI Response:", response.text);
return response.text;
} catch (error) {
console.error("AI Generation Error:", error);
return null;
}
}
};
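A short usage sketch for aiController.generateMinutes, assuming GEMINI_API_KEY (and optionally GEMINI_MODEL_ID) are set in the environment and that the import path matches the src/logics layout; the transcript string is a stand-in for speech_recognition.raw.

import { aiController } from "./logics/ai";

const transcript = "Speaker A: 本日はTiramisuのご提案で伺いました。 Speaker B: よろしくお願いします。";

aiController.generateMinutes(transcript).then((minutes) => {
  if (!minutes) {
    console.error("generation failed");
    return;
  }
  // Markdown text; processRequest later converts it to HTML with marked.parse().
  console.log(minutes);
});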

View file

@ -0,0 +1,32 @@
export const dateController = {
convertToJst: (date: string): Date => {
const utcDate = new Date(date);
const jstDate = utcDate.toLocaleString('ja-JP', { timeZone: 'Asia/Tokyo' })
return new Date(jstDate);
},
getFormattedDate: (date: Date, format: string): string => {
const symbol = {
M: date.getMonth() + 1,
d: date.getDate(),
h: date.getHours(),
m: date.getMinutes(),
s: date.getSeconds(),
};
const formatted = format.replace(/(M+|d+|h+|m+|s+)/g, (v) =>
((v.length > 1 ? "0" : "") + symbol[v.slice(-1) as keyof typeof symbol]).slice(-2)
);
return formatted.replace(/(y+)/g, (v) =>
date.getFullYear().toString().slice(-v.length)
);
},
getCurrentJstTime: (format: string) => {
const utcDate = new Date().toUTCString();
const jstDate = dateController.convertToJst(utcDate);
const jstStr = dateController.getFormattedDate(jstDate, format);
return jstStr;
// return dateController.getFormattedDate(utcDate, "yyyy/MM/dd hh:mm:ss");
},
};
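A quick illustration of how the date helpers are applied to the MiiTel starts_at timestamp (sample value made up). Because convertToJst re-parses the Asia/Tokyo wall-clock string as local time, getFormattedDate returns the JST components regardless of the server's own timezone.

import { dateController } from "./logics/date";

const startsAt = "2025-12-05T05:12:11Z"; // 14:12 JST

const jst = dateController.convertToJst(startsAt);
console.log(dateController.getFormattedDate(jst, "yyyy年MM月dd日")); // "2025年12月05日"
console.log(dateController.getCurrentJstTime("yyyy-MM-dd hh:mm:ss")); // e.g. "2025-12-09 14:57:17"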

View file

@ -0,0 +1,46 @@
import { Response } from "express";
import z from "zod";
import { ERROR_DEFINITIONS, ErrorKey } from "../stores/errorCodes";
import { Delay } from "cerceis-lib";
import { MAX_RETRY_COUNT, ROOP_DELAY_MS } from "../../serverConfig";
const CustomErrorSchema = z.object({
code: z.string(),
message: z.string(),
statusCode:z.number(),
});
export type CustomError = z.infer<typeof CustomErrorSchema>;
export const createCustomError = (key: ErrorKey): CustomError => {
const errorInfo = ERROR_DEFINITIONS[key];
return CustomErrorSchema.parse(errorInfo);
};
export const responseError = (error: any, res: Response | null = null) => {
if (!CustomErrorSchema.safeParse(error).success) {
console.error(error);
console.error("========== Unknown Error ==========");
if(res) return res.status(500).send('Internal Server Error');
}
const parsedError = CustomErrorSchema.parse(error);
console.error("========== Custom Error ==========");
console.error(`Error Code: ${parsedError.code}\n Message: ${parsedError.message}`);
if(res) return res.status(parsedError.statusCode).send(parsedError.message);
}
export const callFunctionWithRetry = async <T>(fn: () => Promise<T>): Promise<T | null> => {
for(let retryCount = 0; retryCount <= MAX_RETRY_COUNT; retryCount++) {
try {
const result = await fn();
if(!result) throw Error();
return result;
} catch(error) {
if(retryCount === MAX_RETRY_COUNT) return null;
console.warn(`\n\n========== リトライ${retryCount + 1}回目 ==========\n\n`);
await Delay(ROOP_DELAY_MS);
}
}
return null;
};
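A minimal sketch of how these helpers compose, mirroring the pattern used in process.ts; the folder and file names are the same literals as the serverConfig constants, and the import paths assume the src/logics layout.

import { callFunctionWithRetry, createCustomError, responseError } from "./logics/error";
import { storageController } from "./logics/storage";

const loadOwners = async () => {
  // Retries up to MAX_RETRY_COUNT times, then treats a null result as a coded error.
  const ownersJson = await callFunctionWithRetry(() =>
    storageController.loadJsonFromGCS("master", "owners.json")
  );
  if (!ownersJson) throw createCustomError("GET_OWNERS_FAILED");
  return JSON.parse(ownersJson);
};

// Without a Response object, responseError only logs the code and message (E3004).
loadOwners().catch((err) => responseError(err));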

View file

@ -0,0 +1,51 @@
import { dateController } from "./date";
import archiver from "archiver";
import fs from "fs";
export const fileController = {
createMinutesFileName: (title: string, hostName: string, meetingDateStr: string): string => {
const fileName = `${meetingDateStr} ${title.replace(/\//g, '')} ${hostName}`;
return fileName;
},
extractCompanyNameFromTitle: (title: string) => {
const normalizedTitle = title.replace("【", "").replace("】", "");
const companyName = normalizedTitle.split("様")[0];
return companyName;
},
createMinutesContent: (videoUrl: string, hostName: string, minutes: string): string => {
let minutesContent = `会議履歴URL${videoUrl}\n`;
minutesContent += `担当者:${hostName}\n\n`;
minutesContent += minutes;
return minutesContent;
},
createZip: async (body: any, outputPath: string, fileName: string): Promise<boolean> => {
try {
await new Promise((resolve, reject) => {
const output = fs.createWriteStream(outputPath);
const archive = archiver('zip', {
zlib: { level: 9 }
});
output.on('close', () => {
// console.log(archive.pointer() + ' total bytes');
// console.log('archiver has been finalized and the output file descriptor has closed.');
resolve(true);
});
archive.on('error', (err) => {
reject(err);
});
archive.pipe(output);
archive.append(JSON.stringify(body), { name: fileName + '.json' });
archive.finalize();
})
return true;
} catch(error) {
return false;
}
},
};
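The title convention assumed by the pipeline is 【会社名】様 …, so the pure helpers can be checked without any external services (values below are made up).

import { fileController } from "./logics/file";

const title = "【サンプル株式会社】様 定例お打ち合わせ";

console.log(fileController.extractCompanyNameFromTitle(title));
// -> "サンプル株式会社" (the key later passed to fuzzyMatchController)

console.log(fileController.createMinutesFileName(title, "山田太郎", "2025年12月05日"));
// -> "2025年12月05日 【サンプル株式会社】様 定例お打ち合わせ 山田太郎"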

View file

@ -0,0 +1,63 @@
import { search } from "fast-fuzzy";
import { storageController } from "./storage";
import { CLOUD_STORAGE_MASTER_FOLDER_NAME, COMPANIES_FILE_NAME, LEGAL_SUFFIX } from "../../serverConfig";
import { Company, CompanySchema } from "./hubspot";
import z from "zod";
import { callFunctionWithRetry } from "./error";
export const fuzzyMatchController = {
searchMatchedCompany: async(companyName: string): Promise<Company | null> => {
try {
const companiesJson = await callFunctionWithRetry(() => storageController.loadJsonFromGCS(CLOUD_STORAGE_MASTER_FOLDER_NAME, COMPANIES_FILE_NAME));
if(!companiesJson) return null;
const parsedCompanies = z.array(CompanySchema).safeParse(JSON.parse(companiesJson));
if(!parsedCompanies.success) return null;
const normalizedCompanyName = fuzzyMatchController.normalizeCompanyName(companyName);
const companies: Company[] = parsedCompanies.data.map((c) => CompanySchema.parse({
id: c.id,
name: c.name,
}));
// Exact Match
const exactMatchedCompany = fuzzyMatchController.searchExactMatchedCompany(normalizedCompanyName, companies);
// console.log(exactMatchedCompanyId);
if(exactMatchedCompany) return exactMatchedCompany;
// Fuzzy Match
const results = search(
fuzzyMatchController.normalizeCompanyName(companyName),
parsedCompanies.data,
{
keySelector: (obj) => fuzzyMatchController.normalizeCompanyName(obj.name),
returnMatchData: true,
threshold: 0.8,
},
);
console.log("===== Search Results =====");
console.log(results);
if(results.length <= 0) return null;
if(results.length === 1) return results[0].item;
if(results.length > 1) {
// 同スコアが複数存在
if(results[0].score === results[1].score) return null;
// トップが単独の場合のみ
return results[0].item;
}
return null;
} catch(error) {
console.error(error);
return null;
}
},
normalizeCompanyName: (companyName: string) => {
return companyName.replace(LEGAL_SUFFIX, '');
},
searchExactMatchedCompany: (companyName: string, companies: Company[]): Company | null => {
for(const company of companies) {
if(companyName === fuzzyMatchController.normalizeCompanyName(company.name)) return company;
};
return null;
},
};
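searchMatchedCompany itself needs the companies.json master in GCS, but the two pure helpers can be exercised directly; the companies below are made up, and the input to searchExactMatchedCompany is assumed to be normalized already, as it is inside searchMatchedCompany.

import { fuzzyMatchController } from "./logics/fuzzyMatch";
import type { Company } from "./logics/hubspot";

const companies: Company[] = [
  { id: "101", name: "株式会社サンプル" },
  { id: "102", name: "サンプル工業" },
];

console.log(fuzzyMatchController.normalizeCompanyName("サンプル(株)")); // "サンプル"
console.log(fuzzyMatchController.searchExactMatchedCompany("サンプル", companies)); // { id: "101", ... }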

View file

@ -0,0 +1,260 @@
import { docs_v1, drive_v3, google, sheets_v4 } from "googleapis";
import fs from "fs";
import { DEBUG, DOCUMENT_MIMETYPE, LOG_SHEET_HEADER_VALUES, SHEET_MIMETYPE } from "../../serverConfig";
import z from "zod";
import { Readable } from "stream";
const GOOGLE_DRIVE_FOLDER_ID = process.env.GOOGLE_DRIVE_FOLDER_ID;
const SCOPES = ["https://www.googleapis.com/auth/drive", "https://www.googleapis.com/auth/drive.file"]
export const LogRowDataSchema = z.object({
timestamp: z.string(),
meetingDate: z.string(),
title: z.string(),
matchedCompanyName: z.string(),
ownerName: z.string(),
meetingUrl: z.string(),
documentUrl: z.string(),
hubspotUrl: z.string(),
});
export type LogRowData = z.infer<typeof LogRowDataSchema>
export const googleDriveController = {
getAuth: async (): Promise<any> => {
try {
const credentials = JSON.parse(process.env.SEARVICE_ACCOUNT_CREDENTIALS || "{}");
const auth = await new google.auth.GoogleAuth({
credentials: credentials,
scopes: SCOPES,
});
if (!auth) return null;
return auth;
} catch (error) {
console.error("Error obtaining Google Auth:", error);
return null;
}
},
getDriveClient: (auth: any): drive_v3.Drive => {
// console.log("Google Drive client authenticated.");
const drive = google.drive({ version: "v3", auth: auth });
return drive;
},
getSheetsClient: (auth: any): sheets_v4.Sheets => {
const sheets = google.sheets({ version: "v4", auth: auth });
return sheets;
},
getDocsClient: (auth: any): docs_v1.Docs => {
const docs = google.docs({ version: "v1", auth: auth });
return docs;
},
checkConnection: async (driveClient: drive_v3.Drive): Promise<boolean> => {
try {
const res = await driveClient.files.list({
corpora: 'drive',
driveId: GOOGLE_DRIVE_FOLDER_ID,
pageSize: 1,
fields: "files(id, name)",
includeItemsFromAllDrives: true,
includeTeamDriveItems: true,
supportsAllDrives: true
});
console.log("Google Drive connection check successful:", res.data);
return true;
} catch (error) {
console.error("Error checking Google Drive connection:", error);
return false;
}
},
uploadFile: async (driveClient: drive_v3.Drive, filePath: string, folderId: string, fileName: string, contentType: string): Promise<string | null> => {
try {
const response = await driveClient.files.create({
requestBody: {
name: fileName,
parents: [folderId],
},
media: {
mimeType: contentType,
body: fs.createReadStream(filePath),
},
supportsAllDrives: true,
});
if(!response.data.id) return null;
return response.data.id;
} catch (error) {
console.error("Error uploading file:", error);
return null;
}
},
searchFileIdByFileName: async (driveClient: drive_v3.Drive, folderId: string, fileName: string): Promise<string | null> => {
try {
const params = googleDriveController.getSearchFileParamsByDebugMode(folderId);
const res = await driveClient.files.list(params);
// console.log("Files:");
// console.log(res.data.files);
if(!res.data.files) return null;
for(const file of res.data.files) {
if(fileName === file.name) {
if(!file.id) return null;
return file.id;
}
}
return null;
} catch (error) {
console.error('Error searching files:', error);
return null;
}
},
getSearchFileParamsByDebugMode: (folderId: string): drive_v3.Params$Resource$Files$List => {
if(DEBUG) {
return {
corpora: 'user',
q: `'${folderId}' in parents`,
pageSize: 10,
fields: "files(id, name)",
includeItemsFromAllDrives: true,
includeTeamDriveItems: true,
supportsAllDrives: true
}
}
return {
corpora: 'drive',
driveId: GOOGLE_DRIVE_FOLDER_ID,
q: `'${folderId}' in parents`,
pageSize: 10,
fields: "files(id, name)",
includeItemsFromAllDrives: true,
includeTeamDriveItems: true,
supportsAllDrives: true
}
},
createNewFile: async (driveClient: drive_v3.Drive, folderId: string, fileName: string, mimeType: string): Promise<string | null> => {
try {
const requestBody = {
name: fileName,
parents: [folderId], // 作成したフォルダのIDを指定
mimeType: mimeType,
};
const file = await driveClient.files.create({
requestBody,
supportsAllDrives: true,
// fields: 'id',
});
console.log('File Id:', file.data);
if (!file.data.id) return null;
return file.data.id;
} catch (error) {
console.error('Error creating file:', error);
return null;
}
},
createMinutesDocument: async(driveClient: drive_v3.Drive, folderId: string, fileName: string, htmlText: string): Promise<string | null> => {
try {
const requestBody = {
name: fileName,
parents: [folderId], // 作成したフォルダのIDを指定
mimeType: DOCUMENT_MIMETYPE,
};
const media = {
mimeType: 'text/html',
body: Readable.from([htmlText])
};
const file = await driveClient.files.create({
requestBody,
media,
supportsAllDrives: true,
// fields: 'id',
});
console.log('File Id:', file.data);
if (!file.data.id) return null;
return file.data.id;
} catch(err) {
console.error('Error creating file:', err);
return null;
}
},
// CAUTION
deleteFile: async (driveClient: drive_v3.Drive, fileId: string) => {
try {
const body = { trashed: true }
const response = await driveClient.files.update({
fileId: fileId,
requestBody: body,
});
console.log('File deleted:', response.data);
} catch (error) {
console.error('Error deleting file:', error);
}
},
// addContentToDocs: async (docsClient: docs_v1.Docs, documentId: string, content: string): Promise<boolean> => {
// try {
// const requestBody: docs_v1.Schema$BatchUpdateDocumentRequest = {
// requests: [
// {
// insertText: {
// text: content,
// location: {
// index: 1,
// }
// }
// }
// ]
// };
// const response = await docsClient.documents.batchUpdate({
// documentId: documentId,
// requestBody: requestBody,
// });
// console.log('Content added to document:', response.data);
// return true;
// } catch (error) {
// console.error('Error adding content to document:', error);
// return false;
// }
// },
getLogSheetId: async (driveClient: drive_v3.Drive, sheetsClient: sheets_v4.Sheets, folderId: string, fileName: string): Promise<string | null> => {
try {
const existsSheetId = await googleDriveController.searchFileIdByFileName(driveClient, folderId, fileName);
if(existsSheetId) return existsSheetId;
// console.log('=== Create New Sheet ===')
const newSheetId = await googleDriveController.createNewFile(driveClient, folderId, fileName, SHEET_MIMETYPE);
if(!newSheetId) return null;
//
await googleDriveController.insertRowToSheet(sheetsClient, newSheetId, ['※シート名変更厳禁']);
await googleDriveController.insertRowToSheet(sheetsClient, newSheetId, LOG_SHEET_HEADER_VALUES);
return newSheetId;
} catch (error) {
console.error('Error searching files:', error);
return null;
}
},
insertRowToSheet: async (sheetsClient: sheets_v4.Sheets, sheetId: string, rowData: string[] ): Promise<boolean> => {
try {
const body = {
values: [rowData]
}
const params: sheets_v4.Params$Resource$Spreadsheets$Values$Append = {
spreadsheetId: sheetId,
range: 'Sheet1',
valueInputOption: 'USER_ENTERED',
insertDataOption: 'INSERT_ROWS',
requestBody: body,
}
await sheetsClient.spreadsheets.values.append(params);
return true;
} catch (error) {
console.log(error);
return false;
}
},
};
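A rough sketch of the monthly log-sheet flow, assuming SEARVICE_ACCOUNT_CREDENTIALS and GOOGLE_DRIVE_FOLDER_ID are set; MINUTES_FOLDER_ID is a placeholder for the real MINUTES_CREATION_HISTORY_FOLDER_ID value, and the row contents are made up.

import { googleDriveController } from "./logics/googleDrive";
import { dateController } from "./logics/date";

const MINUTES_FOLDER_ID = "replace-with-folder-id"; // placeholder

const main = async () => {
  const auth = await googleDriveController.getAuth();
  const driveClient = googleDriveController.getDriveClient(auth);
  const sheetsClient = googleDriveController.getSheetsClient(auth);

  // One spreadsheet per month (e.g. "202512"), created with header rows if missing.
  const sheetName = dateController.getCurrentJstTime("yyyyMM");
  const sheetId = await googleDriveController.getLogSheetId(driveClient, sheetsClient, MINUTES_FOLDER_ID, sheetName);
  if (!sheetId) return;

  await googleDriveController.insertRowToSheet(sheetsClient, sheetId, [
    dateController.getCurrentJstTime("yyyy-MM-dd hh:mm:ss"),
    "2025年12月05日", "【サンプル株式会社】様 定例お打ち合わせ", "株式会社サンプル", "山田太郎", "", "", "",
  ]);
};

main().catch(console.error);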

View file

@ -0,0 +1,124 @@
import { Client } from "@hubspot/api-client";
import { AssociationSpecAssociationCategoryEnum } from "@hubspot/api-client/lib/codegen/crm/objects/meetings/models/AssociationSpec";
import { PublicAssociationsForObject } from "@hubspot/api-client/lib/codegen/crm/objects/meetings";
import z from "zod";
const hubspotClient = new Client({ accessToken: process.env.HUBSPOT_ACCESS_TOKEN });
export const CompanySchema = z.object({
id: z.string(),
name: z.string(),
});
export const OwnerSchema = z.object({
id: z.string(),
email: z.string().optional().default(''),
});
export type Company = z.infer<typeof CompanySchema>;
export type Owner = z.infer<typeof OwnerSchema>;
export const hubspotController = {
check: async(): Promise<boolean | null> => {
try {
const response = await hubspotClient.crm.companies.getAll();
console.log(response.length);
console.log("HubSpot connection check successful.");
return true;
} catch (error) {
console.error("HubSpot connection check failed:", error);
return false;
}
},
getCompanies: async(): Promise<Company[] | null> => {
try {
const allCompanies: Company[] = [];
const limit = 100;
let after: string | undefined = undefined;
for(let i = 0; i < 1000; i++) {
console.log(`Fetching companies, iteration ${i+1}`);
const response = await hubspotClient.crm.companies.basicApi.getPage(limit, after);
// console.log(response.results);
const companies: Company[] = response.results.map((company) => CompanySchema.parse({
id: company.id,
name: company.properties.name ?? '',
}));
allCompanies.push(...companies);
if(response.paging && response.paging.next && response.paging.next.after) {
after = response.paging.next.after;
continue;
}
break;
}
return allCompanies;
} catch (error) {
console.error("Error fetching companies:", error);
return null;
}
},
getOwners: async(): Promise<Owner[] | null> => {
try {
const allOwners: Owner[] = [];
const limit = 100;
let after: string | undefined = undefined;
for(let i = 0; i < 1000; i++) {
console.log(`Fetching owners, iteration ${i+1}`);
const response = await hubspotClient.crm.owners.ownersApi.getPage(undefined,after,limit);
// console.log(response.results);
const owners: Owner[] = response.results.map((owner) => OwnerSchema.parse({
id: owner.id,
email: owner.email,
}));
allOwners.push(...owners);
if(response.paging && response.paging.next && response.paging.next.after) {
after = response.paging.next.after;
continue;
}
break;
}
return allOwners;
} catch (error) {
console.error("Error fetching owners:", error);
return null;
}
},
createMeetingLog: async(companyId: string, title: string, userId: string | null, minutes: string, startsAt: string, endsAt: string ): Promise<boolean> => {
try {
// 改行コードを変換
const minutes_html = minutes.replace(/\n/g, "<br>");
const associations: PublicAssociationsForObject[] = [{
types: [
{associationCategory: AssociationSpecAssociationCategoryEnum.HubspotDefined, associationTypeId: 188},
],
to: {id: companyId},
}];
const properties = {
hs_timestamp: startsAt,
hs_meeting_title: title,
hubspot_owner_id: userId || '',
hs_meeting_body: minutes_html,
hs_meeting_start_time: startsAt,
hs_meeting_end_time: endsAt,
}
const result = await hubspotClient.crm.objects.meetings.basicApi.create({
associations: associations,
properties: properties,
});
return true;
} catch (error) {
console.error("Error creating HubSpot meeting log:", error);
return false;
}
},
searchOwnerIdByEmail: (email: string, owners: Owner[]): string | null => {
for(const owner of owners) {
if(email === owner.email) return owner.id;
}
return null;
},
};
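The owner lookup is pure, while createMeetingLog needs HUBSPOT_ACCESS_TOKEN and a real company ID; the IDs and e-mail addresses below are placeholders.

import { hubspotController, type Owner } from "./logics/hubspot";

const owners: Owner[] = [
  { id: "901", email: "host@example.com" },
  { id: "902", email: "other@example.com" },
];

const ownerId = hubspotController.searchOwnerIdByEmail("host@example.com", owners);
console.log(ownerId); // "901"

// Requires HUBSPOT_ACCESS_TOKEN; "123456789" stands in for an existing company ID.
hubspotController
  .createMeetingLog("123456789", "【サンプル株式会社様】お打ち合わせ", ownerId, "議事録本文", "2025-12-05T05:00:00Z", "2025-12-05T06:00:00Z")
  .then((ok) => console.log("meeting log created:", ok));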

View file

@ -0,0 +1,193 @@
import z from "zod";
import { aiController } from "./ai";
import { dateController } from "./date";
import { googleDriveController, LogRowData, LogRowDataSchema } from "./googleDrive";
import { fileController } from "./file";
import path, { join } from "path";
import fs from "fs";
import { callFunctionWithRetry, createCustomError } from "./error";
import { storageController } from "./storage";
import { CLOUD_STORAGE_MASTER_FOLDER_NAME, DATE_FORMAT, DATETIME_FORMAT, DOCUMENT_MIMETYPE, OWNERS_FILE_NAME, YM_FORMAT } from "../../serverConfig";
import { hubspotController, OwnerSchema } from "./hubspot";
import { fuzzyMatchController } from "./fuzzyMatch";
import { marked } from "marked";
const VideoInfoSchema = z.looseObject({
id: z.string(),
title: z.string(),
starts_at: z.string(),
ends_at: z.string(),
access_permission: z.string(),
host: z.object({
login_id: z.string(),
user_name: z.string(),
}),
speech_recognition: z.object({
raw: z.string(),
})
});
type VideoInfo = z.infer<typeof VideoInfoSchema>;
export const MiiTelWebhookSchema = z.object({
video: VideoInfoSchema,
});
const GOOGLE_DRIVE_FOLDER_ID = process.env.GOOGLE_DRIVE_FOLDER_ID || '';
const MIITEL_REQUEST_LOG_FOLDER_ID = process.env.MIITEL_REQUEST_LOG_FOLDER_ID || '';
const MINUTES_CREATION_HISTORY_FOLDER_ID = process.env.MINUTES_CREATION_HISTORY_FOLDER_ID || '';
const MIITEL_URL = process.env.MIITEL_URL || '';
const HUBSPOT_COMPANY_URL = process.env.HUBSPOT_COMPANY_URL || '';
const FILE_PATH = join(__dirname, "../files/");
let outputPath = '';
export const processRequest = async (videoInfo: VideoInfo) => {
try {
const videoId = videoInfo.id;
const title = videoInfo.title;
const startsAt = videoInfo.starts_at;
const endsAt = videoInfo.ends_at;
const accessPermission = videoInfo.access_permission;
const hostId = videoInfo.host.login_id;
const hostName = videoInfo.host.user_name;
const speechRecognition = videoInfo.speech_recognition.raw;
if (accessPermission !== "EVERYONE" || !title.includes("様") || title.includes("社内")) return;
// ===== Init =====
const googleAuth = await googleDriveController.getAuth();
const driveClient = googleDriveController.getDriveClient(googleAuth);
const docsClient = googleDriveController.getDocsClient(googleAuth);
const sheetsClient = googleDriveController.getSheetsClient(googleAuth);
const jstStartsAt = dateController.convertToJst(startsAt);
const meetingDateStr = dateController.getFormattedDate(jstStartsAt, "yyyy年MM月dd日");
const jstEndsAt = dateController.convertToJst(endsAt);
const fileName = fileController.createMinutesFileName(title, hostName, meetingDateStr);
const videoUrl = `${MIITEL_URL}app/video/${videoId}`;
// ===== Save Request Log to Google Drive =====
if (!fs.existsSync(FILE_PATH)) fs.mkdirSync(FILE_PATH, { recursive: true });
outputPath = path.join(FILE_PATH, fileName + '.zip');
const createZip = await fileController.createZip(videoInfo, outputPath, fileName);
if(!createZip) throw createCustomError("CREATE_ZIP_FILE_FAILED");
const logFileId = await callFunctionWithRetry(() => googleDriveController.uploadFile(driveClient, outputPath, MIITEL_REQUEST_LOG_FOLDER_ID, fileName + '.zip', "application/zip"));
if(!logFileId) throw createCustomError("UPLOAD_LOG_FAILED");
// ===== Generate Minutes =====
const minutes = await callFunctionWithRetry(() => aiController.generateMinutes(speechRecognition));
console.log(minutes);
if (!minutes) throw createCustomError("AI_GENERATION_FAILED");
const html = await marked.parse(minutes);
let content = `<p>会議履歴URL<a href="${videoUrl}">${videoUrl}</a></p>`;
content += `<p>担当者:${hostName}</p>`;
content += html;
// ===== Upload To Google Drive =====
const documentId = await callFunctionWithRetry(() => googleDriveController.createMinutesDocument(driveClient, GOOGLE_DRIVE_FOLDER_ID, fileName, content));
if (!documentId) throw createCustomError("CREATE_NEW_DOCUMENT_FAILED");
// ===== Create Meeting Log at Hubspot =====
const ownersJson = await callFunctionWithRetry(() => storageController.loadJsonFromGCS(CLOUD_STORAGE_MASTER_FOLDER_NAME, OWNERS_FILE_NAME));
if(!ownersJson) throw createCustomError("GET_OWNERS_FAILED");
const parsedOwners = z.array(OwnerSchema).safeParse(JSON.parse(ownersJson));
if(!parsedOwners.success) throw createCustomError("ZOD_FAILED");
const ownerId = hubspotController.searchOwnerIdByEmail(hostId, parsedOwners.data);
const extractedCompanyName = fileController.extractCompanyNameFromTitle(title);
const matchedCompany = await fuzzyMatchController.searchMatchedCompany(extractedCompanyName);
if(matchedCompany) {
const createLogResult = await callFunctionWithRetry(() => hubspotController.createMeetingLog(matchedCompany.id, title, ownerId, content, startsAt, endsAt));
if(!createLogResult) throw createCustomError("CREATE_MEETING_LOG_FAILED");
}
// ===== Append Log To SpreadSheet =====
const currentYearMonth = dateController.getCurrentJstTime(YM_FORMAT);
const sheetId = await callFunctionWithRetry(() => googleDriveController.getLogSheetId(driveClient, sheetsClient, MINUTES_CREATION_HISTORY_FOLDER_ID, currentYearMonth));
if(!sheetId) throw createCustomError("GET_SHEET_ID_FAILED");
const currentJstDateTimeStr = dateController.getCurrentJstTime(DATETIME_FORMAT);
const rowData: LogRowData = LogRowDataSchema.parse({
timestamp: currentJstDateTimeStr,
meetingDate: meetingDateStr,
title: title,
matchedCompanyName: matchedCompany?.name ?? '',
ownerName: hostName,
meetingUrl: videoUrl,
documentUrl: `https://docs.google.com/document/d/${documentId}/edit`,
hubspotUrl: matchedCompany ? `${HUBSPOT_COMPANY_URL}/${matchedCompany.id}` : '',
});
const insertResult = await callFunctionWithRetry(() => googleDriveController.insertRowToSheet(sheetsClient, sheetId, Object.values(rowData)));
if(!insertResult) throw createCustomError("INSERT_ROW_FAILED");
fs.unlinkSync(outputPath);
} catch (error) {
fs.unlinkSync(outputPath);
throw error;
}
};
export const logUploadProcess = async (videoInfo: VideoInfo): Promise<boolean | null> => {
try {
const videoId = videoInfo.id;
const title = videoInfo.title;
const startsAt = videoInfo.starts_at;
const endsAt = videoInfo.ends_at;
const accessPermission = videoInfo.access_permission;
const hostId = videoInfo.host.login_id;
const hostName = videoInfo.host.user_name;
const speechRecognition = videoInfo.speech_recognition.raw;
if (accessPermission !== "EVERYONE" || !title.includes("様") || title.includes("社内")) return true;
// ===== Init =====
const googleAuth = await googleDriveController.getAuth();
const driveClient = googleDriveController.getDriveClient(googleAuth);
const docsClient = googleDriveController.getDocsClient(googleAuth);
const sheetsClient = googleDriveController.getSheetsClient(googleAuth);
const jstStartsAt = dateController.convertToJst(startsAt);
const meetingDateStr = dateController.getFormattedDate(jstStartsAt, "yyyy年MM月dd日");
const jstEndsAt = dateController.convertToJst(endsAt);
const fileName = fileController.createMinutesFileName(title, hostName, meetingDateStr);
const videoUrl = `${MIITEL_URL}app/video/${videoId}`;
// ===== Save Request Log to Google Drive =====
if (!fs.existsSync(FILE_PATH)) fs.mkdirSync(FILE_PATH, { recursive: true });
outputPath = path.join(FILE_PATH, fileName + '.zip');
const createZip = await fileController.createZip(videoInfo, outputPath, fileName);
if(!createZip) throw createCustomError("CREATE_ZIP_FILE_FAILED");
const logFileId = await callFunctionWithRetry(() => googleDriveController.uploadFile(driveClient, outputPath, MIITEL_REQUEST_LOG_FOLDER_ID, `${fileName}.zip`, "application/zip"));
if(!logFileId) throw createCustomError("UPLOAD_LOG_FAILED");
fs.unlinkSync(outputPath);
return true;
} catch(error) {
console.log(error);
fs.unlinkSync(outputPath);
return false;
}
};
export const testProcess = async () => {
try {
// Google Drive 接続確認
const googleAuth = await googleDriveController.getAuth();
const driveClilent = googleDriveController.getDriveClient(googleAuth);
const driveResponse = await googleDriveController.checkConnection(driveClilent);
if(!driveResponse) throw createCustomError("CONNECT_GOOGLE_DRIVE_FAILED");
// Hubspot 接続確認
const hubspotResponse = await hubspotController.check();
if(!hubspotResponse) throw createCustomError("CONNECT_HUBSPOT_FAILED");
return;
} catch(error) {
throw error;
}
};
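Mirroring the /reExecute route, a saved webhook body can be reloaded from GCS and pushed through the pipeline again; "sample-video-id" is a placeholder for a real meeting ID, and the usual GCP/HubSpot credentials are required.

import { MiiTelWebhookSchema, processRequest } from "./logics/process";
import { storageController } from "./logics/storage";

const main = async () => {
  const log = await storageController.loadFromGCS("new_request_log", "sample-video-id.json.gz");
  if (!log) throw new Error("log not found");

  const parsed = MiiTelWebhookSchema.safeParse(JSON.parse(log));
  if (!parsed.success) throw new Error("unexpected log format");

  await processRequest(parsed.data.video);
};

main().catch(console.error);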

View file

@ -0,0 +1,67 @@
import { Storage } from "@google-cloud/storage";
import zlib from "zlib";
const csClient = new Storage({projectId: process.env.PROJECT_ID});
const BUCKET_NAME = process.env.CLOUD_STORAGE_BUCKET_NAME || '';
const bucket = csClient.bucket(BUCKET_NAME);
export const storageController = {
saveToGCS: async(folder: string, filename: string, content: any, contentType: string) => {
const file = bucket.file((`${folder}/${filename}`));
await file.save(content, {
contentType: contentType,
})
},
loadFromGCS: async(folder: string, filename: string): Promise<string | null> => {
const file = bucket.file(`${folder}/${filename}`);
console.log("loading file:", `${folder}/${filename}`);
try {
const [data] = await file.download();
return zlib.gunzipSync(data).toString("utf-8");
} catch (err: any) {
return null;
}
},
loadJsonFromGCS: async(folder: string, filename: string): Promise<string | null> => {
const file = bucket.file(`${folder}/${filename}`);
// console.log("loading file:", file.name);
try {
const [data] = await file.download();
return data.toString("utf-8");
} catch (err: any) {
return null;
}
},
existsInGCS: async(folder: string, filename: string): Promise<boolean> => {
const file = bucket.file((`${folder}/${filename}`));
console.log("checking file:", file.name);
try {
const [exist] = await file.exists();
return exist;
} catch (err: any) {
return false;
}
},
getFileList: async(): Promise<string[] | null> => {
try {
const results = await bucket.getFiles({
prefix: 'request_log/',
});
const files = results[0];
files.sort((a, b) => {
if(!a.metadata.timeCreated || !b.metadata.timeCreated) return 0;
const timeA = new Date(a.metadata.timeCreated).getTime();
const timeB = new Date(b.metadata.timeCreated).getTime();
return timeA - timeB;
});
// for(const f of files[0]) {
// list.push(f.name);
// }
return files.map((f) => f.name);
} catch(error) {
return null;
}
}
};
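A gzip round-trip against the bucket, assuming PROJECT_ID, CLOUD_STORAGE_BUCKET_NAME and GCS credentials are available; note that loadFromGCS always gunzips, so only gzip-compressed payloads can be read back through it (loadJsonFromGCS is the plain-text variant).

import zlib from "zlib";
import { storageController } from "./logics/storage";

const main = async () => {
  const payload = JSON.stringify({ video: { id: "sample-video-id" } });
  const gzipped = zlib.gzipSync(payload);

  await storageController.saveToGCS("new_request_log", "sample-video-id.json.gz", gzipped, "application/gzip");

  console.log(await storageController.existsInGCS("new_request_log", "sample-video-id.json.gz")); // true

  const restored = await storageController.loadFromGCS("new_request_log", "sample-video-id.json.gz");
  console.log(restored === payload); // true
};

main().catch(console.error);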

View file

@ -0,0 +1,37 @@
// errorDefinitions.ts
export const ERROR_DEFINITIONS = {
ZOD_FAILED: { code: "E1003", message: "zodのチェックが失敗しました", statusCode: 400 },
// Google Drive関連
// 議事録Google Docsの作成アップロード失敗
CONNECT_GOOGLE_DRIVE_FAILED: { code: "E2001", message: "ファイル一覧取得に失敗しました", statusCode: 500 },
GET_FOLDER_ID_FAILED: { code: "E2002", message: "フォルダID取得に失敗しました", statusCode: 500 },
GET_SHEET_ID_FAILED: { code: "E2003", message: "スプレッドシートID取得に失敗しました", statusCode: 500 },
CREATE_NEW_DOCUMENT_FAILED: { code: "E2004", message: "ドキュメント作成に失敗しました", statusCode: 500 },
UPLOAD_MINUTES_FAILED: { code: "E2005", message: "議事録のアップロードに失敗しました", statusCode: 500 },
UPLOAD_LOG_FAILED: { code: "E2006", message: "ログファイルのアップロードに失敗しました", statusCode: 500 },
INSERT_ROW_FAILED: { code: "E2007", message: "シートへのデータ追加に失敗しました", statusCode: 500 },
// Hubspot関連
// オーナー情報の取得失敗
CONNECT_HUBSPOT_FAILED: { code: "E3001", message: "HubSpotへの接続に失敗しました", statusCode: 500 },
GET_OWNERS_FAILED: { code: "E3004", message: "オーナー情報の取得に失敗しました", statusCode: 500 },
GET_COMPANIES_FAILED: { code: "E3005", message: "会社情報の取得に失敗しました", statusCode: 500 },
GET_FILES_FAILED: { code: "E3010", message: "ファイルの取得に失敗しました", statusCode: 500 },
CREATE_MEETING_LOG_FAILED: { code: "E3011", message: "ミーティングログ作成に失敗しました", statusCode: 500 },
// AI による議事録生成失敗
AI_GENERATION_FAILED: { code: "E4001", message: "AIによる議事録生成に失敗しました", statusCode: 500 },
CREATE_ZIP_FILE_FAILED: { code: "E3007", message: "ZIPファイルの作成に失敗しました", statusCode: 500 },
} as const;
export type ErrorKey = keyof typeof ERROR_DEFINITIONS;

View file

@ -0,0 +1,13 @@
{
"compilerOptions": {
"target": "ES2020",
"module": "commonjs",
"outDir": "dist",
"strict": true,
"esModuleInterop": true,
"moduleResolution": "node",
"resolveJsonModule": true,
"skipLibCheck": true
},
// "include": ["", "index.ts"]
}

View file

@ -1,4 +0,0 @@
PROJECT_ID=datacom-poc
LOCATION=asia-northeast1
BUCKET=meeting-report-data
WORKFLOW=mrt-workflow-create-minutes

View file

@ -1,4 +0,0 @@
PROJECT_ID: datacom-poc
LOCATION: asia-northeast1
BUCKET: meeting-report-data
WORKFLOW: mrt-workflow-create-minutes

View file

@ -1,4 +0,0 @@
PROJECT_ID: rational-timing-443808-u0
LOCATION: asia-northeast1
BUCKET: meeting-data
WORKFLOW: mrt-workflow-create-minutes

View file

@ -1,33 +0,0 @@
#!/bin/bash
# プロジェクトIDを設定
PROJECT_ID="datacom-poc"
# デプロイする関数名
FUNCTION_NAME="mrt-trigger-minutes-workflow-from-miitel"
# 関数のエントリポイント
ENTRY_POINT="handle_request"
# ランタイム
RUNTIME="python312"
# リージョン
REGION="asia-northeast1"
# 環境変数ファイル
ENV_VARS_FILE=".env_dev"
gcloud auth application-default set-quota-project $PROJECT_ID
gcloud config set project $PROJECT_ID
# デプロイコマンド
gcloud functions deploy $FUNCTION_NAME \
--gen2 \
--region $REGION \
--runtime $RUNTIME \
--source=./source \
--trigger-http \
--no-allow-unauthenticated \
--entry-point $ENTRY_POINT \
--env-vars-file $ENV_VARS_FILE

View file

@ -1,75 +0,0 @@
import functions_framework
from google.cloud import storage
from google.cloud.workflows import executions_v1
from google.cloud.workflows.executions_v1.types import Execution
import json
import os
import gzip
# Storage クライアントを作成
cs_client = storage.Client()
wf_client = executions_v1.ExecutionsClient()
@functions_framework.http
def handle_request(request):
# POSTリクエストの処理
if request.method != 'POST':
# 他のメソッドに対するエラーレスポンス
return ({'error': 'Method not allowed'}, 405)
try:
request_json = request.get_json()
print(request_json)
if "challenge" in request_json:
# MiiTelのチャレンジリクエストに対する応答
return (request_json["challenge"], 200, {'Content-Type':'text/plain'})
project_id = os.getenv("PROJECT_ID")
bucket_name = os.getenv("BUCKET") # バケット名
location = os.getenv("LOCATION") # ワークフローのロケーション
workflow = os.getenv("WORKFLOW") # ワークフロー名
# デバッグ用に保存
save_to_gcs(bucket_name,request_json)
# ワークフロー呼び出し
argument = json.dumps({"video": request_json["video"]})
execution = Execution(argument=argument)
parent = f"projects/{project_id}/locations/{location}/workflows/{workflow}"
print(parent)
response = wf_client.create_execution(request={"parent": parent, "execution": execution})
print(f"Workflow execution started: {response.name}")
return (json.dumps({}), 200, {'Content-Type': 'application/json'})
except Exception as e:
# エラー
error_response = {
"error": str(e) #エラー内容
}
print(str(e))
return json.dumps(error_response), 500, {'Content-Type': 'application/json'} #エラー
def save_to_gcs(bucket_name,request_json):
file_name = request_json["video"]["id"] + ".json.gz"
bucket = cs_client.bucket(bucket_name)
# GCS バケットのブロブを取得
blob = bucket.blob(f"request_log/{file_name}")
# JSONを文字列に変換
json_string = json.dumps(request_json)
# Gzip圧縮
compressed_data = gzip.compress(json_string.encode('utf-8'))
# 圧縮されたデータをアップロード
blob.upload_from_string(compressed_data, content_type='application/gzip')

View file

@ -1,4 +0,0 @@
functions-framework==3.*
Flask
google-cloud-storage
google-cloud-workflows

View file

@ -1,2 +0,0 @@
KEY_PATH=projects/570987459910/secrets/sa-create-minutes-key
FOLDER_ID=0AGT_1dSq66qYUk9PVA

View file

@ -1,2 +0,0 @@
KEY_PATH: projects/32472615575/secrets/sa-access-google-drive-key
FOLDER_ID: 1cCDJKusfrlDrJe2yHCR8pCHJXRqX-4Hw

View file

@ -1,2 +0,0 @@
KEY_PATH: projects/570987459910/secrets/sa-create-minutes-key
FOLDER_ID: 0AGT_1dSq66qYUk9PVA

View file

@ -1,128 +0,0 @@
import functions_framework
from google.cloud import secretmanager
from google.oauth2 import service_account
from googleapiclient.discovery import build
from googleapiclient.errors import HttpError
import json
import os
SCOPES = ["https://www.googleapis.com/auth/drive", "https://www.googleapis.com/auth/drive.file"]
sm_client = secretmanager.SecretManagerServiceClient()
@functions_framework.http
def handle_request(request):
# POSTリクエストの処理
if request.method != 'POST':
# 他のメソッドに対するエラーレスポンス
return ({'error': 'Method not allowed'}, 405)
try:
request_json = request.get_json()
print(request_json)
folder_id = os.getenv("FOLDER_ID") # 共有ドライブID
file_name = request_json["file_name"] # 会議タイトル
minutes = request_json["minutes"] # 議事録
# Secret Manager からサービスアカウントJSON文字列を取得
service_account_info = get_service_account_info()
# 認証
credentials = get_credentials(service_account_info)
# APIクライアントの構築
drive_service = build("drive", "v3", credentials=credentials)
docs_service = build("docs", "v1", credentials=credentials)
# ファイル作成
document_id = create_new_document(drive_service, folder_id, file_name)
print(f"Created document with ID: {document_id}")
# テキスト内容をセット
append_minutes_to_doc(docs_service, document_id, minutes)
response_data = {
"document_id": document_id, # 作成したドキュメントのID
}
return json.dumps(response_data) , 200, {"Content-Type": "application/json"}
except Exception as e:
# エラー
error_response = {
"error": str(e) #エラー内容
}
print(str(e))
return json.dumps(error_response), 500, {'Content-Type': 'application/json'} #エラー
#
# SecretManagerから秘密鍵を取得
#
def get_service_account_info():
key_path = os.getenv('KEY_PATH') + "/versions/1"
# 秘密鍵取得
response = sm_client.access_secret_version(name=key_path)
# 秘密鍵の値をデコード
secret_key = response.payload.data.decode("UTF-8")
return json.loads(secret_key)
# Google Drive認証
def get_credentials(service_account_info):
credentials = service_account.Credentials.from_service_account_info(
service_account_info,
scopes=SCOPES
)
return credentials
def create_new_document(service,folder_id,title):
"""
Google Drive APIを使用して新しいGoogleドキュメントを作成する
:param service: Google Drive APIのサービスオブジェクト
:param folder_id: 作成先フォルダのID
:param title: ドキュメントのタイトル
:return: 作成したドキュメントのID
"""
file_metadata = {
'name': title,
'parents': [folder_id], # 作成したフォルダのIDを指定
'mimeType': 'application/vnd.google-apps.document',
}
result = (
service.files()
.create(body=file_metadata, fields="id", supportsAllDrives=True)
.execute()
)
return result.get("id")
def append_minutes_to_doc(service, document_id, minutes):
"""
Google Docs APIを使用してドキュメントに議事録を追加する
:param service: Google Docs APIのサービスオブジェクト
:param document_id: ドキュメントのID
:param minutes: 追加する議事録テキスト
"""
requests = [
{
'insertText': {
'location': {
'index': 1,
},
'text': minutes
}
},
]
body = {
'requests': requests
}
# ドキュメントに議事録を追加
result = service.documents().batchUpdate(
documentId=document_id,
body=body,
).execute()
return result

View file

@ -1,5 +0,0 @@
functions-framework==3.*
google-cloud-secret-manager
google-api-python-client
google-auth-httplib2
google-auth-oauthlib

View file

@ -30,22 +30,6 @@ resource "google_project_iam_member" "cf_sa_role" {
}
# Cloud Workflows用サービスアカウント
resource "google_service_account" "workflows_sa" {
project = var.project_id
account_id = "mrt-cloudworkflows-sa"
display_name = "Cloud Workflows SA"
}
# SA
resource "google_project_iam_member" "wf_cf_role" {
for_each = toset(["roles/cloudfunctions.invoker","roles/run.invoker"])
project = var.project_id
role = each.value
member = "serviceAccount:${google_service_account.workflows_sa.email}"
}
# API Gateway用サービスアカウント
resource "google_service_account" "gateway_sa" {
project = var.project_id
@ -62,17 +46,17 @@ resource "google_project_iam_member" "gateway_role" {
}
# cloud build用サービスアカウント
resource "google_service_account" "cloudbuild_sa" {
# Scheduler実行用サービスアカウント
resource "google_service_account" "cf_scheduler_sa" {
project = var.project_id
account_id = "mrt-cloudbuild-sa"
display_name = "Cloud Build 用サービスアカウント"
account_id = "mrt-scheduler-sa"
display_name = "Cloud Functions 起動用サービスアカウント"
}
# SA
resource "google_project_iam_member" "cloudbuild_role" {
for_each = toset(["roles/cloudbuild.builds.builder","roles/storage.objectAdmin", "roles/artifactregistry.writer", "roles/developerconnect.readTokenAccessor", "roles/cloudfunctions.developer","roles/workflows.admin", "roles/iam.serviceAccountUser"])
resource "google_project_iam_member" "scheduler_role" {
for_each = toset(["roles/cloudfunctions.invoker","roles/run.invoker"])
project = var.project_id
role = each.value
member = "serviceAccount:${google_service_account.cloudbuild_sa.email}"
member = "serviceAccount:${google_service_account.cf_scheduler_sa.email}"
}

View file

@ -10,37 +10,22 @@ variable "region" {
variable "function_name" {
type = string
default = "mrt-create-log-sheet"
default = "generate-minutes"
}
# Scheduler実行用サービスアカウント
resource "google_service_account" "cf_scheduler_sa" {
project = var.project_id
account_id = "mrt-scheduler-sa"
display_name = "Cloud Functions 起動用サービスアカウント"
}
# SA
resource "google_project_iam_member" "scheduler_role" {
for_each = toset(["roles/cloudfunctions.invoker","roles/run.invoker"])
# 3 Function Scheduler
resource "google_cloud_scheduler_job" "daily_cf_trigger" {
project = var.project_id
role = each.value
member = "serviceAccount:${google_service_account.cf_scheduler_sa.email}"
}
# 10 Function Scheduler
resource "google_cloud_scheduler_job" "monthly_cf_trigger" {
project = var.project_id
name = "monthly-cf-trigger"
description = "Invoke Cloud Function on the 1st of each month at 00:00"
name = "daily-cf-trigger"
description = "Invoke Cloud Function everyday at 03:00"
region = var.region
schedule = "0 0 1 * *"
schedule = "0 3 * * *"
time_zone = "Asia/Tokyo"
http_target {
uri = "https://${var.region}-${var.project_id}.cloudfunctions.net/${var.function_name}"
uri = "https://${var.region}-${var.project_id}.cloudfunctions.net/${var.function_name}/api/dailyBatch"
http_method = "POST"
oidc_token {
service_account_email = google_service_account.cf_scheduler_sa.email

View file

@ -1,14 +0,0 @@
#!/bin/bash
# 環境変数
PROJECT_ID="datacom-poc"
WORKFLOW_NAME="mrt-workflow-create-minutes"
gcloud auth application-default set-quota-project $PROJECT_ID
gcloud config set project $PROJECT_ID
gcloud workflows deploy $WORKFLOW_NAME \
--source=main.yaml \
--location=asia-northeast1

View file

@ -1,71 +0,0 @@
main:
params: [input]
steps:
- initialize:
assign:
- project_id: ${sys.get_env("GOOGLE_CLOUD_PROJECT_ID")}
- create_hubspot_meeting_log_result: {}
- upload_minutes_to_drive_result: {}
- generate_meeting_minutes:
call: http.post
args:
url: ${"https://asia-northeast1-" + project_id + ".cloudfunctions.net/mrt-generate-meeting-minutes"}
body:
video: ${input.video}
auth:
type: OIDC
result: generate_meeting_minutes_result
- conditinal_switch:
switch:
- condition: ${generate_meeting_minutes_result.body.status != "end"}
steps:
- parallel_execute:
parallel:
shared:
[
create_hubspot_meeting_log_result,
upload_minutes_to_drive_result,
]
branches:
- create_hubspot_meeting_log_branch:
steps:
- create_hubspot_meeting_log:
call: http.post
args:
url: ${"https://asia-northeast1-" + project_id + ".cloudfunctions.net/mrt-create-hubspot-meeting-log"}
body:
title: ${generate_meeting_minutes_result.body.title}
host_id: ${generate_meeting_minutes_result.body.host_id}
starts_at: ${generate_meeting_minutes_result.body.starts_at}
ends_at: ${generate_meeting_minutes_result.body.ends_at}
minutes: ${generate_meeting_minutes_result.body.minutes}
auth:
type: OIDC
result: create_hubspot_meeting_log_result
- upload_minutes_to_drive_branch:
steps:
- upload-minutes-to-drive:
call: http.post
args:
url: ${"https://asia-northeast1-" + project_id + ".cloudfunctions.net/mrt-upload-minutes-to-drive"}
body:
file_name: ${generate_meeting_minutes_result.body.file_name}
minutes: ${generate_meeting_minutes_result.body.minutes}
auth:
type: OIDC
result: upload_minutes_to_drive_result
- append_log_to_sheet:
call: http.post
args:
url: ${"https://asia-northeast1-" + project_id + ".cloudfunctions.net/mrt-append-log-to-sheet"}
body:
title: ${generate_meeting_minutes_result.body.title}
host_name: ${generate_meeting_minutes_result.body.host_name}
video_url: ${generate_meeting_minutes_result.body.video_url}
starts_at: ${generate_meeting_minutes_result.body.starts_at}
matched_company_id: ${create_hubspot_meeting_log_result.body.matched_company_id}
matched_company_name: ${create_hubspot_meeting_log_result.body.matched_company_name}
document_id: ${upload_minutes_to_drive_result.body.document_id}
auth:
type: OIDC
result: append_log_to_sheet_result