python -> node.js

This commit is contained in:
parent 092f2ec0f3
commit 395fba645d

62 changed files with 726 additions and 1702 deletions

1 .gitignore (vendored)
@@ -19,4 +19,5 @@ dist/
.env
.env_prod
credentials.json
credentials_dev.json
package-lock.json
@@ -1,5 +0,0 @@
KEY_PATH=projects/32472615575/secrets/sa-access-google-drive-key
LOG_FOLDER_ID=1IZToaM9K9OJXrgV05aLO5k2ZCXpdlJzX
MEETING_FOLDER_ID=1cCDJKusfrlDrJe2yHCR8pCHJXRqX-4Hw
HUBSPOT_COMPANY_URL=https://app-na2.hubspot.com/contacts/242960467/record/0-2
MODE=dev
@@ -1,5 +0,0 @@
KEY_PATH: projects/32472615575/secrets/sa-access-google-drive-key
LOG_FOLDER_ID: 1IZToaM9K9OJXrgV05aLO5k2ZCXpdlJzX
MEETING_FOLDER_ID: 1cCDJKusfrlDrJe2yHCR8pCHJXRqX-4Hw
HUBSPOT_COMPANY_URL: https://app-na2.hubspot.com/contacts/242960467/record/0-2
MODE: dev
@@ -1,5 +0,0 @@
KEY_PATH: projects/570987459910/secrets/sa-create-minutes-key
LOG_FOLDER_ID: 1arL6AxpvA7N6Umg4wdrdAcRWBdKc-Jfb
MEETING_FOLDER_ID: 0AGT_1dSq66qYUk9PVA
HUBSPOT_COMPANY_URL: https://app.hubspot.com/contacts/22400567/record/0-2
MODE: production
@@ -1,33 +0,0 @@
#!/bin/bash

# Project ID
PROJECT_ID="datacom-poc"

# Name of the function to deploy
FUNCTION_NAME="mrt-append-log-to-sheet"

# Function entry point
ENTRY_POINT="handle_request"

# Runtime
RUNTIME="python312"

# Region
REGION="asia-northeast1"

# Environment variables file
ENV_VARS_FILE=".env_dev"

gcloud auth application-default set-quota-project $PROJECT_ID
gcloud config set project $PROJECT_ID

# Deploy command
gcloud functions deploy $FUNCTION_NAME \
--gen2 \
--region $REGION \
--runtime $RUNTIME \
--source=./source \
--trigger-http \
--no-allow-unauthenticated \
--entry-point $ENTRY_POINT \
--env-vars-file $ENV_VARS_FILE
@ -1,267 +0,0 @@
|
|||
import functions_framework
|
||||
from google.cloud import secretmanager
|
||||
from google.oauth2 import service_account
|
||||
from googleapiclient.discovery import build
|
||||
from googleapiclient.errors import HttpError
|
||||
import json
|
||||
import os
|
||||
from datetime import datetime, timezone, timedelta
|
||||
|
||||
|
||||
sm_client = secretmanager.SecretManagerServiceClient()
|
||||
|
||||
|
||||
SCOPES = ["https://www.googleapis.com/auth/drive", "https://www.googleapis.com/auth/drive.file"]
|
||||
HEADER_VALUES = ["タイムスタンプ","商談日", "タイトル", "登録先企業","担当者", "ミーティングURL", "議事録URL", "HubSpot会社概要URL"]
|
||||
|
||||
@functions_framework.http
|
||||
def handle_request(request):
|
||||
# POSTリクエストの処理
|
||||
if request.method != 'POST':
|
||||
return ('', 405, {'Allow': 'POST', 'Content-Type': 'application/json'}) # メソッドがPOSTでない場合は405エラーを返す
|
||||
|
||||
"""Shows basic usage of the Drive Activity API.
|
||||
|
||||
Prints information about the last 10 events that occured the user's Drive.
|
||||
"""
|
||||
try:
|
||||
request_json = request.get_json()
|
||||
print(request_json)
|
||||
title = request_json['title'] # 会議タイトル
|
||||
document_id = request_json['document_id'] # 議事録ファイルのID
|
||||
matched_company_id = request_json['matched_company_id'] # マッチした会社ID
|
||||
matched_company_name = request_json['matched_company_name'] # マッチした会社名
|
||||
host_name = request_json['host_name'] # ホストユーザー名
|
||||
video_url = request_json['video_url'] # 会議履歴URL
|
||||
starts_at = request_json['starts_at'] # 開始日時
|
||||
|
||||
log_folder_id = os.getenv("LOG_FOLDER_ID") # 共有ドライブID
|
||||
meeting_folder_id = os.getenv("MEETING_FOLDER_ID") # ミーティングフォルダID
|
||||
hubspot_company_url = os.getenv("HUBSPOT_COMPANY_URL") # HubSpotの会社情報URL
|
||||
mode = os.getenv("MODE") # モード(devまたはprod)
|
||||
|
||||
service_account_info = get_service_account_info()
|
||||
# 認証
|
||||
credentials = get_credentials(service_account_info)
|
||||
|
||||
# APIクライアントの構築
|
||||
drive_service = build("drive", "v3", credentials=credentials)
|
||||
sheet_service = build("sheets", "v4", credentials=credentials)
|
||||
|
||||
|
||||
# 現在日時をJSTに変換
|
||||
jst_now = datetime.now(timezone.utc).astimezone(timezone(timedelta(hours=9)))
|
||||
# JSTの現在日時を文字列に変換
|
||||
ym_str = jst_now.strftime("%Y%m")
|
||||
y_str = jst_now.strftime("%Y")
|
||||
|
||||
|
||||
# 年別のフォルダを検索
|
||||
target_folder = get_directory_files_dev(drive_service, log_folder_id, y_str) if mode == "dev" else get_directory_files_prod(drive_service, meeting_folder_id, log_folder_id, y_str)
|
||||
print("target_folder", target_folder)
|
||||
|
||||
year_folder_id = None
|
||||
if not target_folder:
|
||||
# フォルダが存在しない場合は新規作成
|
||||
year_folder_id = create_new_folder(drive_service, log_folder_id, y_str)
|
||||
else:
|
||||
# フォルダが存在する場合はそのIDを使用
|
||||
year_folder_id = target_folder[0]['id']
|
||||
print("年別のフォルダID:", year_folder_id)
|
||||
|
||||
# スプレッドシートを検索
|
||||
target_files = get_directory_files_dev(drive_service, year_folder_id, ym_str) if mode == "dev" else get_directory_files_prod(drive_service, meeting_folder_id, year_folder_id, ym_str)
|
||||
print("スプレッドシート", target_files)
|
||||
|
||||
if not target_files:
|
||||
print('not found')
|
||||
|
||||
# スプレッドシートを作成
|
||||
spreadsheet_id = create_new_spreadsheet(drive_service, year_folder_id, ym_str)
|
||||
print("スプレッドシートID:", spreadsheet_id)
|
||||
# 注意事項追加
|
||||
append_log_to_sheet(sheet_service, spreadsheet_id, ["※シート名変更厳禁"])
|
||||
# ヘッダーを追加
|
||||
append_log_to_sheet(sheet_service, spreadsheet_id, HEADER_VALUES)
|
||||
|
||||
else:
|
||||
print('found')
|
||||
# ファイルIDを取得
|
||||
spreadsheet_id = target_files[0]['id']
|
||||
|
||||
documnet_url = f"https://docs.google.com/document/d/{document_id}/edit" if document_id else ""
|
||||
hubspot_url = f"{hubspot_company_url}/{matched_company_id}" if matched_company_id else ""
|
||||
# テストログを追加
|
||||
row_data = [jst_now.strftime("%Y-%m-%d %H:%M:%S"),
|
||||
convert_to_jst_ymd(starts_at),
|
||||
title,
|
||||
matched_company_name,
|
||||
host_name,
|
||||
video_url,
|
||||
documnet_url,
|
||||
hubspot_url
|
||||
]
|
||||
append_log_to_sheet(sheet_service, spreadsheet_id, row_data)
|
||||
print("ログを追加しました:", row_data)
|
||||
|
||||
return (json.dumps({"status": "success"}, ensure_ascii=False), 200, {"Content-Type": "application/json"})
|
||||
|
||||
except HttpError as error:
|
||||
# TODO(developer) - Handleerrors from drive activity API.
|
||||
print(f"An error occurred: {error}")
|
||||
|
||||
|
||||
#
|
||||
# SecretManagerから秘密鍵を取得
|
||||
#
|
||||
def get_service_account_info():
|
||||
key_path = os.getenv('KEY_PATH') + "/versions/1"
|
||||
# 秘密鍵取得
|
||||
response = sm_client.access_secret_version(name=key_path)
|
||||
# 秘密鍵の値をデコード
|
||||
secret_key = response.payload.data.decode("UTF-8")
|
||||
return json.loads(secret_key)
|
||||
|
||||
# Google Drive認証
|
||||
def get_credentials(service_account_info):
|
||||
credentials = service_account.Credentials.from_service_account_info(
|
||||
service_account_info,
|
||||
scopes=SCOPES
|
||||
)
|
||||
return credentials
|
||||
|
||||
|
||||
# 開発用マイドライブからのファイルを取得
|
||||
def get_directory_files_dev(service,shared_folder_id, filename):
|
||||
"""
|
||||
対象のディレクトリ配下からファイル名で検索した結果を配列で返す
|
||||
:param filename: ファイル名
|
||||
:param directory_id: ディレクトリID
|
||||
:param pages_max: 最大ページ探索数
|
||||
:return: ファイルリスト
|
||||
"""
|
||||
items = []
|
||||
page = 0
|
||||
pages_max = 10 # 最大ページ数
|
||||
while True:
|
||||
page += 1
|
||||
if page == pages_max:
|
||||
break
|
||||
results = service.files().list(
|
||||
corpora="user",
|
||||
includeItemsFromAllDrives=True,
|
||||
includeTeamDriveItems=True,
|
||||
q=f"'{shared_folder_id}' in parents and name = '{filename}' and trashed = false",
|
||||
supportsAllDrives=True,
|
||||
pageSize=10,
|
||||
fields="nextPageToken, files(id, name)").execute()
|
||||
items += results.get("files", [])
|
||||
|
||||
page_token = results.get('nextPageToken', None)
|
||||
if page_token is None:
|
||||
break
|
||||
return items
|
||||
|
||||
# 本番用共有ドライブからのファイルを取得
|
||||
def get_directory_files_prod(service,shared_folder_id,sub_folder_id,filename):
|
||||
"""
|
||||
対象のディレクトリ配下からファイル名で検索した結果を配列で返す
|
||||
:param filename: ファイル名
|
||||
:param directory_id: ディレクトリID
|
||||
:param pages_max: 最大ページ探索数
|
||||
:return: ファイルリスト
|
||||
"""
|
||||
items = []
|
||||
page = 0
|
||||
pages_max = 10 # 最大ページ数
|
||||
while True:
|
||||
page += 1
|
||||
if page == pages_max:
|
||||
break
|
||||
results = service.files().list(
|
||||
corpora="drive",
|
||||
driveId=shared_folder_id,
|
||||
includeItemsFromAllDrives=True,
|
||||
includeTeamDriveItems=True,
|
||||
q=f"'{sub_folder_id}' in parents and name = '{filename}' and trashed = false",
|
||||
supportsAllDrives=True,
|
||||
pageSize=10,
|
||||
fields="nextPageToken, files(id, name, parents)").execute()
|
||||
items += results.get("files", [])
|
||||
|
||||
page_token = results.get('nextPageToken', None)
|
||||
if page_token is None:
|
||||
break
|
||||
return items
|
||||
|
||||
def create_new_folder(service, sub_folder_id, title):
|
||||
"""
|
||||
Google Drive APIを使用して新しいフォルダを作成する
|
||||
:param service: Google Drive APIのサービスオブジェクト
|
||||
:param title: フォルダのタイトル
|
||||
:return: 作成したフォルダのID
|
||||
"""
|
||||
file_metadata = {
|
||||
"name": title,
|
||||
"parents": [sub_folder_id], # 共有ドライブのIDを指定
|
||||
"mimeType": "application/vnd.google-apps.folder",
|
||||
}
|
||||
|
||||
result = service.files().create(body=file_metadata, fields="id", supportsAllDrives=True).execute()
|
||||
return result.get('id')
|
||||
|
||||
|
||||
def create_new_spreadsheet(service,folder_id,title):
|
||||
"""
|
||||
Google Sheets APIを使用して新しいスプレッドシートを作成する
|
||||
:param service: Google Sheets APIのサービスオブジェクト
|
||||
:param title: スプレッドシートのタイトル
|
||||
:return: 作成したスプレッドシートのID
|
||||
"""
|
||||
file_metadata = {
|
||||
'name': title,
|
||||
'parents': [folder_id], # 作成したフォルダのIDを指定
|
||||
'mimeType': 'application/vnd.google-apps.spreadsheet',
|
||||
}
|
||||
result = (
|
||||
service.files()
|
||||
.create(body=file_metadata, fields="id", supportsAllDrives=True)
|
||||
.execute()
|
||||
)
|
||||
return result.get("id")
|
||||
|
||||
|
||||
def append_log_to_sheet(service, spreadsheet_id, row_data):
|
||||
"""
|
||||
Google Sheets APIを使用してスプレッドシートにログを追加する
|
||||
:param service: Google Sheets APIのサービスオブジェクト
|
||||
:param spreadsheet_id: スプレッドシートのID
|
||||
:param row_data: 追加するログデータ(リスト形式)
|
||||
"""
|
||||
body = {
|
||||
'values': [row_data]
|
||||
}
|
||||
|
||||
# スプレッドシートにログを追加
|
||||
result = service.spreadsheets().values().append(
|
||||
spreadsheetId=spreadsheet_id,
|
||||
range='Sheet1',
|
||||
valueInputOption="USER_ENTERED",
|
||||
insertDataOption='INSERT_ROWS',
|
||||
body=body,
|
||||
).execute()
|
||||
print(f"{result.get('updates').get('updatedCells')} cells appended.")
|
||||
|
||||
|
||||
|
||||
|
||||
def convert_to_jst_ymd(starts_at):
|
||||
"""
|
||||
開始日時をYYYY年MM月DD日形式に変換する
|
||||
:param starts_at: 開始日時の文字列
|
||||
:return: YYYY年MM月DD日形式の文字列
|
||||
"""
|
||||
# 開始日時をUTCからJSTに変換
|
||||
dt = datetime.fromisoformat(starts_at.replace("Z", "+00:00")).astimezone(timezone(timedelta(hours=9)))
|
||||
# YYYY年MM月DD日形式に変換
|
||||
return dt.strftime("%Y年%m月%d日")
|
||||
|
|
@@ -1,5 +0,0 @@
functions-framework==3.*
google-cloud-secret-manager
google-api-python-client
google-auth-httplib2
google-auth-oauthlib
@@ -1,5 +0,0 @@
PROJECT_ID=datacom-poc
LOCATION=asia-northeast1
BUCKET=meeting-report-data
KEY_PATH=projects/32472615575/secrets/mrt-hubspot-accesstoken
MODE=dev
@@ -1,5 +0,0 @@
PROJECT_ID: datacom-poc
LOCATION: asia-northeast1
BUCKET: meeting-report-data
KEY_PATH: projects/32472615575/secrets/mrt-hubspot-accesstoken
MODE: dev
@@ -1,5 +0,0 @@
PROJECT_ID: rational-timing-443808-u0
LOCATION: asia-northeast1
BUCKET: meeting-data
KEY_PATH: projects/570987459910/secrets/mrt-hubspot-accesstoken
MODE: prod
@@ -1,33 +0,0 @@
#!/bin/bash

# Project ID
PROJECT_ID="datacom-poc"

# Name of the function to deploy
FUNCTION_NAME="mrt-create-hubspot-meeting-log"

# Function entry point
ENTRY_POINT="handle_request"

# Runtime
RUNTIME="python312"

# Region
REGION="asia-northeast1"

# Environment variables file
ENV_VARS_FILE=".env_dev"

gcloud auth application-default set-quota-project $PROJECT_ID
gcloud config set project $PROJECT_ID

# Deploy command
gcloud functions deploy $FUNCTION_NAME \
--gen2 \
--region $REGION \
--runtime $RUNTIME \
--source=./source \
--trigger-http \
--no-allow-unauthenticated \
--entry-point $ENTRY_POINT \
--env-vars-file $ENV_VARS_FILE
@ -1,200 +0,0 @@
|
|||
import functions_framework
|
||||
from google.cloud import storage, secretmanager
|
||||
import os
|
||||
import hubspot
|
||||
from hubspot.crm.objects.meetings import SimplePublicObjectInputForCreate, ApiException
|
||||
import requests
|
||||
import csv
|
||||
import io
|
||||
import re
|
||||
import jaconv
|
||||
from rapidfuzz import process, fuzz
|
||||
import json
|
||||
|
||||
CUTOFF = 80 # Fuzzy 閾値 (0-100)
|
||||
LEGAL_SUFFIX = r'(株式会社|(株)|\(株\)|有限会社|合同会社|Inc\.?|Corp\.?|Co\.?Ltd\.?)'
|
||||
|
||||
cs_client = storage.Client(project=os.getenv("PROJECT_ID"))
|
||||
sm_client = secretmanager.SecretManagerServiceClient()
|
||||
|
||||
@functions_framework.http
|
||||
def handle_request(request):
|
||||
try:
|
||||
request_json = request.get_json()
|
||||
print(request_json)
|
||||
|
||||
mode = os.getenv("MODE") # モード(devまたはprod)
|
||||
title = request_json['title']
|
||||
host_id = request_json['host_id'] if mode == 'prod' else 'ksuenaga@datacom.jp' # ホストユーザーID(開発環境では固定値を使用)
|
||||
starts_at = request_json['starts_at']
|
||||
ends_at = request_json['ends_at']
|
||||
minutes = request_json['minutes']
|
||||
|
||||
# タイトルから【】を削除
|
||||
title = title.replace("【", "").replace("】", "")
|
||||
# タイトルから企業名を抽出
|
||||
company_name = title.split("様")[0].strip() # "様" で分割して企業名を取得
|
||||
print("抽出した企業名:", company_name)
|
||||
|
||||
# 会社名から会社IDを取得
|
||||
matched_company_id, matched_company_name = search_company(company_name)
|
||||
|
||||
# マッチしたときだけ処理を行う
|
||||
if matched_company_id:
|
||||
# ユーザーIDを取得
|
||||
by_email = load_owners()
|
||||
user_id = None
|
||||
if host_id in by_email:
|
||||
user_id = by_email[host_id]['id']
|
||||
print("取得したユーザーID:", user_id)
|
||||
|
||||
# 改行コードを <br> タグに変換
|
||||
minutes_html = minutes.replace("\n", "<br>")
|
||||
# ミーティングログを作成
|
||||
create_meeting_log(matched_company_id, title, user_id, starts_at, ends_at, minutes_html)
|
||||
|
||||
|
||||
response_data = {
|
||||
"matched_company_id": matched_company_id, # マッチした会社ID
|
||||
"matched_company_name": matched_company_name, # マッチした会社名
|
||||
}
|
||||
return (json.dumps(response_data, ensure_ascii=False), 200, {"Content-Type": "application/json"})
|
||||
except ApiException as e:
|
||||
print("Exception when calling basic_api->create: %s\n" % e)
|
||||
|
||||
|
||||
def normalize(name: str) -> str:
|
||||
"""表記ゆれ吸収用の正規化"""
|
||||
n = jaconv.z2h(name, kana=False, digit=True, ascii=True).lower()
|
||||
n = re.sub(LEGAL_SUFFIX, '', n)
|
||||
return re.sub(r'[\s\-・・,,、\.]', '', n)
|
||||
|
||||
|
||||
# GCSから会社一覧取得
|
||||
def load_componies():
|
||||
"""
|
||||
毎回 Cloud Storage から CSV を読み込む。
|
||||
*応答速度を気にしない* 前提なのでキャッシュしなくても OK。
|
||||
"""
|
||||
|
||||
blob = cs_client.bucket(os.getenv("BUCKET")).blob('master/mst_company.csv')
|
||||
raw = blob.download_as_bytes() # bytes
|
||||
|
||||
recs, by_norm = [], {}
|
||||
with io.StringIO(raw.decode("utf-8")) as f:
|
||||
reader = csv.DictReader(f)
|
||||
for row in reader:
|
||||
row["norm_name"] = normalize(row["company_name"])
|
||||
recs.append(row)
|
||||
by_norm[row["norm_name"]] = row # 完全一致用ハッシュ
|
||||
|
||||
return recs, by_norm # (list[dict], dict)
|
||||
|
||||
|
||||
# GCSから担当者一覧取得
|
||||
def load_owners():
|
||||
"""
|
||||
GCS から担当者一覧 CSV を読み込み、
|
||||
email -> row 辞書 のマッピングを返す
|
||||
"""
|
||||
|
||||
blob = cs_client.bucket(os.getenv("BUCKET")).blob('master/mst_owner.csv')
|
||||
raw = blob.download_as_bytes() # bytes
|
||||
|
||||
by_email = {}
|
||||
with io.StringIO(raw.decode("utf-8")) as f:
|
||||
reader = csv.DictReader(f)
|
||||
for row in reader:
|
||||
# row に "email" と "user_id" フィールドがある前提
|
||||
email = row["email"].strip().lower()
|
||||
by_email[email] = row
|
||||
|
||||
return by_email
|
||||
|
||||
|
||||
|
||||
def fuzzy_candidates(norm: str, recs):
|
||||
"""
|
||||
norm : 正規化済み検索語
|
||||
recs : 会社レコード list[dict] (norm_name 含む)
|
||||
戻り値 : list[(score:int, idx:int)]
|
||||
"""
|
||||
top = 2 # 上位 2 件を取得
|
||||
matches = process.extract(
|
||||
norm,
|
||||
[r["norm_name"] for r in recs],
|
||||
scorer=fuzz.WRatio,
|
||||
score_cutoff=CUTOFF,
|
||||
limit=top
|
||||
)
|
||||
print("ファジーマッチ結果:", matches)
|
||||
if len(matches) == 0:
|
||||
return None # マッチなしの場合は None を返す
|
||||
elif len(matches) == 1:
|
||||
return recs[matches[0][2]] # 上位 1 件のみの場合はそのレコードを返す
|
||||
else:
|
||||
if(matches[0][1] == matches[1][1]):
|
||||
return None # 上位 2 件のスコアが同じ場合は None を返す
|
||||
return recs[matches[0][2]] # 上位 1 件のみの場合はそのレコードを返す
|
||||
|
||||
|
||||
def search_company(company_name):
|
||||
# -------------------- マスタ読み込み --------------------
|
||||
recs, by_norm = load_componies()
|
||||
norm_company_name = normalize(company_name)
|
||||
print("正規化した企業名:", norm_company_name)
|
||||
|
||||
matched_company_id = None
|
||||
matched_company_name = None
|
||||
# -------------------- 完全一致 --------------------
|
||||
if norm_company_name in by_norm:
|
||||
matched_company_id = by_norm[norm_company_name]["company_id"]
|
||||
matched_company_name = by_norm[norm_company_name]["company_name"]
|
||||
|
||||
# -------------------- ファジーマッチ複数 --------------------
|
||||
else :
|
||||
result = fuzzy_candidates(norm_company_name, recs)
|
||||
if result:
|
||||
matched_company_id = result["company_id"]
|
||||
matched_company_name = result["company_name"]
|
||||
|
||||
print("マッチした会社ID:", matched_company_id)
|
||||
print("マッチした会社名:", matched_company_name)
|
||||
return matched_company_id, matched_company_name
|
||||
|
||||
|
||||
def create_meeting_log(company_id ,title, user_id, starts_at, ends_at, minutes):
|
||||
"""
|
||||
HubSpot API を使ってミーティングログを作成する。
|
||||
"""
|
||||
access_key = get_access_key() # Secret Manager からアクセストークンを取得
|
||||
hs_client = hubspot.Client.create(access_token=access_key)
|
||||
|
||||
properties = {
|
||||
"hs_timestamp": starts_at,
|
||||
"hs_meeting_title": title,
|
||||
"hubspot_owner_id": user_id,
|
||||
"hs_meeting_body": minutes,
|
||||
"hs_meeting_start_time": starts_at,
|
||||
"hs_meeting_end_time": ends_at,
|
||||
|
||||
}
|
||||
|
||||
simple_public_object_input_for_create = SimplePublicObjectInputForCreate(
|
||||
associations=[{"types":[{"associationCategory":"HUBSPOT_DEFINED","associationTypeId":188}],"to":{"id":company_id}}],
|
||||
properties=properties
|
||||
)
|
||||
|
||||
api_response = hs_client.crm.objects.meetings.basic_api.create(simple_public_object_input_for_create=simple_public_object_input_for_create)
|
||||
print(api_response)
|
||||
|
||||
#
|
||||
# SecretManagerからアクセストークンを取得
|
||||
#
|
||||
def get_access_key():
|
||||
key_path = os.getenv('KEY_PATH') + "/versions/1"
|
||||
# アクセストークン取得
|
||||
response = sm_client.access_secret_version(name=key_path)
|
||||
# アクセストークンをデコード
|
||||
access_token = response.payload.data.decode("UTF-8")
|
||||
return access_token
|
||||
|
|
@@ -1,8 +0,0 @@
functions-framework==3.*
Flask
google-cloud-storage
google-cloud-workflows
google-cloud-secret-manager
hubspot-api-client
rapidfuzz
jaconv
@@ -1,5 +0,0 @@
KEY_PATH=projects/32472615575/secrets/sa-access-google-drive-key
LOG_FOLDER_ID=1IZToaM9K9OJXrgV05aLO5k2ZCXpdlJzX
MEETING_FOLDER_ID=1cCDJKusfrlDrJe2yHCR8pCHJXRqX-4Hw
HUBSPOT_COMPANY_URL=https://app-na2.hubspot.com/contacts/242960467/record/0-2
MODE=dev
@@ -1,5 +0,0 @@
KEY_PATH: projects/32472615575/secrets/sa-access-google-drive-key
LOG_FOLDER_ID: 1IZToaM9K9OJXrgV05aLO5k2ZCXpdlJzX
MEETING_FOLDER_ID: 1cCDJKusfrlDrJe2yHCR8pCHJXRqX-4Hw
HUBSPOT_COMPANY_URL: https://app-na2.hubspot.com/contacts/242960467/record/0-2
MODE: dev
@@ -1,5 +0,0 @@
KEY_PATH: projects/570987459910/secrets/sa-create-minutes-key
LOG_FOLDER_ID: 1arL6AxpvA7N6Umg4wdrdAcRWBdKc-Jfb
MEETING_FOLDER_ID: 0AGT_1dSq66qYUk9PVA
HUBSPOT_COMPANY_URL: https://app.hubspot.com/contacts/22400567/record/0-2
MODE: production
@@ -1,33 +0,0 @@
#!/bin/bash

# Project ID
PROJECT_ID="datacom-poc"

# Name of the function to deploy
FUNCTION_NAME="mrt-create-log-sheet"

# Function entry point
ENTRY_POINT="handle_request"

# Runtime
RUNTIME="python312"

# Region
REGION="asia-northeast1"

# Environment variables file
ENV_VARS_FILE=".env_dev"

gcloud auth application-default set-quota-project $PROJECT_ID
gcloud config set project $PROJECT_ID

# Deploy command
gcloud functions deploy $FUNCTION_NAME \
--gen2 \
--region $REGION \
--runtime $RUNTIME \
--source=./source \
--trigger-http \
--no-allow-unauthenticated \
--entry-point $ENTRY_POINT \
--env-vars-file $ENV_VARS_FILE
@ -1,218 +0,0 @@
|
|||
import functions_framework
|
||||
from google.cloud import secretmanager
|
||||
from google.oauth2 import service_account
|
||||
from googleapiclient.discovery import build
|
||||
from googleapiclient.errors import HttpError
|
||||
import json
|
||||
import os
|
||||
from datetime import datetime, timezone, timedelta
|
||||
|
||||
|
||||
sm_client = secretmanager.SecretManagerServiceClient()
|
||||
|
||||
|
||||
SCOPES = ["https://www.googleapis.com/auth/drive", "https://www.googleapis.com/auth/drive.file"]
|
||||
HEADER_VALUES = ["タイムスタンプ","商談日", "タイトル", "登録先企業","担当者", "ミーティングURL", "議事録URL", "HubSpot会社概要URL"]
|
||||
|
||||
@functions_framework.http
|
||||
def handle_request(request):
|
||||
# POSTリクエストの処理
|
||||
if request.method != 'POST':
|
||||
return ('', 405, {'Allow': 'POST', 'Content-Type': 'application/json'}) # メソッドがPOSTでない場合は405エラーを返す
|
||||
|
||||
"""Shows basic usage of the Drive Activity API.
|
||||
|
||||
Prints information about the last 10 events that occured the user's Drive.
|
||||
"""
|
||||
try:
|
||||
log_folder_id = os.getenv("LOG_FOLDER_ID") # 共有ドライブID
|
||||
meeting_folder_id = os.getenv("MEETING_FOLDER_ID") # ミーティングフォルダID
|
||||
mode = os.getenv("MODE") # モード(devまたはprod)
|
||||
|
||||
service_account_info = get_service_account_info()
|
||||
# 認証
|
||||
credentials = get_credentials(service_account_info)
|
||||
|
||||
# APIクライアントの構築
|
||||
drive_service = build("drive", "v3", credentials=credentials)
|
||||
sheet_service = build("sheets", "v4", credentials=credentials)
|
||||
|
||||
|
||||
# 現在日時をJSTに変換
|
||||
jst_now = datetime.now(timezone.utc).astimezone(timezone(timedelta(hours=9)))
|
||||
# JSTの現在日時を文字列に変換
|
||||
ym_str = jst_now.strftime("%Y%m")
|
||||
y_str = jst_now.strftime("%Y")
|
||||
|
||||
|
||||
# 年別のフォルダを検索
|
||||
target_folder = get_directory_files_dev(drive_service, log_folder_id, y_str) if mode == "dev" else get_directory_files_prod(drive_service, meeting_folder_id, log_folder_id, y_str)
|
||||
print("target_folder", target_folder)
|
||||
|
||||
year_folder_id = None
|
||||
if not target_folder:
|
||||
# フォルダが存在しない場合は新規作成
|
||||
year_folder_id = create_new_folder(drive_service, log_folder_id, y_str)
|
||||
else:
|
||||
# フォルダが存在する場合はそのIDを使用
|
||||
year_folder_id = target_folder[0]['id']
|
||||
print("年別のフォルダID:", year_folder_id)
|
||||
|
||||
# スプレッドシートを作成
|
||||
spreadsheet_id = create_new_spreadsheet(drive_service, year_folder_id, ym_str)
|
||||
print("スプレッドシートID:", spreadsheet_id)
|
||||
# 注意事項追加
|
||||
append_log_to_sheet(sheet_service, spreadsheet_id, ["※シート名変更厳禁"])
|
||||
# ヘッダーを追加
|
||||
append_log_to_sheet(sheet_service, spreadsheet_id, HEADER_VALUES)
|
||||
|
||||
|
||||
|
||||
return (json.dumps({"status": "success"}, ensure_ascii=False), 200, {"Content-Type": "application/json"})
|
||||
|
||||
except HttpError as error:
|
||||
# TODO(developer) - Handleerrors from drive activity API.
|
||||
print(f"An error occurred: {error}")
|
||||
|
||||
|
||||
#
|
||||
# SecretManagerから秘密鍵を取得
|
||||
#
|
||||
def get_service_account_info():
|
||||
key_path = os.getenv('KEY_PATH') + "/versions/1"
|
||||
# 秘密鍵取得
|
||||
response = sm_client.access_secret_version(name=key_path)
|
||||
# 秘密鍵の値をデコード
|
||||
secret_key = response.payload.data.decode("UTF-8")
|
||||
return json.loads(secret_key)
|
||||
|
||||
# Google Drive認証
|
||||
def get_credentials(service_account_info):
|
||||
credentials = service_account.Credentials.from_service_account_info(
|
||||
service_account_info,
|
||||
scopes=SCOPES
|
||||
)
|
||||
return credentials
|
||||
|
||||
|
||||
# 開発用マイドライブからのファイルを取得
|
||||
def get_directory_files_dev(service,shared_folder_id, filename):
|
||||
"""
|
||||
対象のディレクトリ配下からファイル名で検索した結果を配列で返す
|
||||
:param filename: ファイル名
|
||||
:param directory_id: ディレクトリID
|
||||
:param pages_max: 最大ページ探索数
|
||||
:return: ファイルリスト
|
||||
"""
|
||||
items = []
|
||||
page = 0
|
||||
pages_max = 10 # 最大ページ数
|
||||
while True:
|
||||
page += 1
|
||||
if page == pages_max:
|
||||
break
|
||||
results = service.files().list(
|
||||
corpora="user",
|
||||
includeItemsFromAllDrives=True,
|
||||
includeTeamDriveItems=True,
|
||||
q=f"'{shared_folder_id}' in parents and name = '{filename}' and trashed = false",
|
||||
supportsAllDrives=True,
|
||||
pageSize=10,
|
||||
fields="nextPageToken, files(id, name)").execute()
|
||||
items += results.get("files", [])
|
||||
|
||||
page_token = results.get('nextPageToken', None)
|
||||
if page_token is None:
|
||||
break
|
||||
return items
|
||||
|
||||
# 本番用共有ドライブからのファイルを取得
|
||||
def get_directory_files_prod(service,shared_folder_id,sub_folder_id,filename):
|
||||
"""
|
||||
対象のディレクトリ配下からファイル名で検索した結果を配列で返す
|
||||
:param filename: ファイル名
|
||||
:param directory_id: ディレクトリID
|
||||
:param pages_max: 最大ページ探索数
|
||||
:return: ファイルリスト
|
||||
"""
|
||||
items = []
|
||||
page = 0
|
||||
pages_max = 10 # 最大ページ数
|
||||
while True:
|
||||
page += 1
|
||||
if page == pages_max:
|
||||
break
|
||||
results = service.files().list(
|
||||
corpora="drive",
|
||||
driveId=shared_folder_id,
|
||||
includeItemsFromAllDrives=True,
|
||||
includeTeamDriveItems=True,
|
||||
q=f"'{sub_folder_id}' in parents and name = '{filename}' and trashed = false",
|
||||
supportsAllDrives=True,
|
||||
pageSize=10,
|
||||
fields="nextPageToken, files(id, name, parents)").execute()
|
||||
items += results.get("files", [])
|
||||
|
||||
page_token = results.get('nextPageToken', None)
|
||||
if page_token is None:
|
||||
break
|
||||
return items
|
||||
|
||||
def create_new_folder(service, sub_folder_id, title):
|
||||
"""
|
||||
Google Drive APIを使用して新しいフォルダを作成する
|
||||
:param service: Google Drive APIのサービスオブジェクト
|
||||
:param title: フォルダのタイトル
|
||||
:return: 作成したフォルダのID
|
||||
"""
|
||||
file_metadata = {
|
||||
"name": title,
|
||||
"parents": [sub_folder_id], # 共有ドライブのIDを指定
|
||||
"mimeType": "application/vnd.google-apps.folder",
|
||||
}
|
||||
|
||||
result = service.files().create(body=file_metadata, fields="id", supportsAllDrives=True).execute()
|
||||
return result.get('id')
|
||||
|
||||
|
||||
def create_new_spreadsheet(service,folder_id,title):
|
||||
"""
|
||||
Google Sheets APIを使用して新しいスプレッドシートを作成する
|
||||
:param service: Google Sheets APIのサービスオブジェクト
|
||||
:param title: スプレッドシートのタイトル
|
||||
:return: 作成したスプレッドシートのID
|
||||
"""
|
||||
file_metadata = {
|
||||
'name': title,
|
||||
'parents': [folder_id], # 作成したフォルダのIDを指定
|
||||
'mimeType': 'application/vnd.google-apps.spreadsheet',
|
||||
}
|
||||
result = (
|
||||
service.files()
|
||||
.create(body=file_metadata, fields="id", supportsAllDrives=True)
|
||||
.execute()
|
||||
)
|
||||
return result.get("id")
|
||||
|
||||
|
||||
def append_log_to_sheet(service, spreadsheet_id, row_data):
|
||||
"""
|
||||
Google Sheets APIを使用してスプレッドシートにログを追加する
|
||||
:param service: Google Sheets APIのサービスオブジェクト
|
||||
:param spreadsheet_id: スプレッドシートのID
|
||||
:param row_data: 追加するログデータ(リスト形式)
|
||||
"""
|
||||
body = {
|
||||
'values': [row_data]
|
||||
}
|
||||
|
||||
# スプレッドシートにログを追加
|
||||
result = service.spreadsheets().values().append(
|
||||
spreadsheetId=spreadsheet_id,
|
||||
range='Sheet1',
|
||||
valueInputOption="USER_ENTERED",
|
||||
insertDataOption='INSERT_ROWS',
|
||||
body=body,
|
||||
).execute()
|
||||
print(f"{result.get('updates').get('updatedCells')} cells appended.")
|
||||
|
||||
|
|
@@ -1,5 +0,0 @@
functions-framework==3.*
google-cloud-secret-manager
google-api-python-client
google-auth-httplib2
google-auth-oauthlib
@@ -1,5 +0,0 @@
PROJECT_ID=datacom-poc
LOCATION=asia-northeast1
BUCKET=meeting-report-data
OBJECT=master/mst_company.csv
KEY_PATH=projects/32472615575/secrets/mrt-hubspot-accesstoken
@@ -1,5 +0,0 @@
PROJECT_ID: datacom-poc
LOCATION: asia-northeast1
BUCKET: meeting-report-data
OBJECT: master/mst_company.csv
KEY_PATH: projects/32472615575/secrets/mrt-hubspot-accesstoken
@@ -1,5 +0,0 @@
PROJECT_ID: rational-timing-443808-u0
LOCATION: asia-northeast1
BUCKET: meeting-data
OBJECT: master/mst_company.csv
KEY_PATH: projects/570987459910/secrets/mrt-hubspot-accesstoken
@ -1,87 +0,0 @@
|
|||
import functions_framework
|
||||
from google.cloud import storage, secretmanager
|
||||
import os
|
||||
import hubspot
|
||||
from hubspot.crm.objects.meetings import ApiException
|
||||
import csv
|
||||
import io
|
||||
import json
|
||||
|
||||
cs_client = storage.Client()
|
||||
sm_client = secretmanager.SecretManagerServiceClient()
|
||||
|
||||
@functions_framework.http
|
||||
def handle_request(request):
|
||||
try:
|
||||
# 会社一覧取得
|
||||
companies = fetch_all_companies()
|
||||
# メモリ上で CSV を生成
|
||||
csv_buffer = io.StringIO()
|
||||
writer = csv.writer(csv_buffer)
|
||||
# ヘッダー行
|
||||
writer.writerow(["company_id", "company_name"])
|
||||
# 各行を書き込み
|
||||
for row in companies:
|
||||
company_id = row['properties']['hs_object_id']
|
||||
company_name = row['properties']['name']
|
||||
writer.writerow([company_id, company_name])
|
||||
|
||||
# Cloud Storage にアップロード
|
||||
upload_to_gcs(csv_buffer)
|
||||
return 'success', 200
|
||||
except ApiException as e:
|
||||
print("Exception when calling basic_api->create: %s\n" % e)
|
||||
return (json.dumps("", ensure_ascii=False), 500, {"Content-Type": "application/json"})
|
||||
|
||||
def fetch_all_companies():
|
||||
"""
|
||||
Companies API の get_page をページネーション付きで呼び出し、
|
||||
全オブジェクトをリストで返す。
|
||||
"""
|
||||
access_key = get_access_key() # Secret Manager からアクセストークンを取得
|
||||
hs_client = hubspot.Client.create(access_token=access_key)
|
||||
|
||||
all_companies = []
|
||||
after = None
|
||||
limit = 100 # 1 回あたりの取得件数(最大 100)
|
||||
|
||||
while True:
|
||||
# get_page の基本呼び出し
|
||||
response = hs_client.crm.companies.basic_api.get_page(
|
||||
limit=limit,
|
||||
archived=False,
|
||||
after=after
|
||||
)
|
||||
|
||||
# レスポンスから companies の配列を追加
|
||||
if response.results:
|
||||
all_companies.extend([c.to_dict() for c in response.results])
|
||||
|
||||
# 次ページがない場合はループ終了
|
||||
paging = response.paging
|
||||
if not paging or not paging.next or not paging.next.after:
|
||||
break
|
||||
|
||||
# next.after をセットして次ループへ
|
||||
after = paging.next.after
|
||||
|
||||
return all_companies
|
||||
|
||||
def upload_to_gcs(data):
|
||||
"""
|
||||
メモリ上の CSV データを Cloud Storage にアップロード
|
||||
"""
|
||||
bucket = cs_client.bucket(os.getenv("BUCKET"))
|
||||
blob = bucket.blob(os.getenv("OBJECT"))
|
||||
blob.upload_from_string(data.getvalue(), content_type='text/csv')
|
||||
|
||||
#
|
||||
# SecretManagerからアクセストークンを取得
|
||||
#
|
||||
def get_access_key():
|
||||
key_path = os.getenv('KEY_PATH') + "/versions/1"
|
||||
# アクセストークン取得
|
||||
response = sm_client.access_secret_version(name=key_path)
|
||||
# アクセストークンをデコード
|
||||
access_token = response.payload.data.decode("UTF-8")
|
||||
return access_token
|
||||
|
|
@@ -1,5 +0,0 @@
functions-framework==3.*
Flask
google-cloud-storage
google-cloud-secret-manager
hubspot-api-client
@@ -1,5 +0,0 @@
PROJECT_ID=datacom-poc
LOCATION=asia-northeast1
BUCKET=meeting-report-data
OBJECT=master/mst_owner.csv
KEY_PATH=projects/32472615575/secrets/mrt-hubspot-accesstoken
@@ -1,5 +0,0 @@
PROJECT_ID: datacom-poc
LOCATION: asia-northeast1
BUCKET: meeting-report-data
OBJECT: master/mst_owner.csv
KEY_PATH: projects/32472615575/secrets/mrt-hubspot-accesstoken
@@ -1,5 +0,0 @@
PROJECT_ID: rational-timing-443808-u0
LOCATION: asia-northeast1
BUCKET: meeting-data
OBJECT: master/mst_owner.csv
KEY_PATH: projects/570987459910/secrets/mrt-hubspot-accesstoken
@ -1,90 +0,0 @@
|
|||
import functions_framework
|
||||
from google.cloud import storage, secretmanager
|
||||
import os
|
||||
import hubspot
|
||||
from hubspot.crm.objects.meetings import ApiException
|
||||
import csv
|
||||
import io
|
||||
import json
|
||||
|
||||
cs_client = storage.Client()
|
||||
sm_client = secretmanager.SecretManagerServiceClient()
|
||||
|
||||
|
||||
@functions_framework.http
|
||||
def handle_request(request):
|
||||
try:
|
||||
# 会社一覧取得
|
||||
owners = fetch_all_owners()
|
||||
# メモリ上で CSV を生成
|
||||
csv_buffer = io.StringIO()
|
||||
writer = csv.writer(csv_buffer)
|
||||
# ヘッダー行
|
||||
writer.writerow(["id", "email"])
|
||||
# 各行を書き込み
|
||||
for row in owners:
|
||||
user_id = row['id']
|
||||
email = row['email']
|
||||
writer.writerow([user_id, email])
|
||||
|
||||
# Cloud Storage にアップロード
|
||||
upload_to_gcs(csv_buffer)
|
||||
return (json.dumps('', ensure_ascii=False), 200, {"Content-Type": "application/json"})
|
||||
except ApiException as e:
|
||||
print("Exception when calling basic_api->create: %s\n" % e)
|
||||
|
||||
|
||||
return (json.dumps("", ensure_ascii=False), 200, {"Content-Type": "application/json"})
|
||||
|
||||
def fetch_all_owners():
|
||||
"""
|
||||
Companies API の get_page をページネーション付きで呼び出し、
|
||||
全オブジェクトをリストで返す。
|
||||
"""
|
||||
access_key = get_access_key() # Secret Manager からアクセストークンを取得
|
||||
hs_client = hubspot.Client.create(access_token=access_key)
|
||||
|
||||
all_owners = []
|
||||
after = None
|
||||
limit = 100 # 1 回あたりの取得件数(最大 100)
|
||||
|
||||
while True:
|
||||
# get_page の基本呼び出し
|
||||
response = hs_client.crm.owners.owners_api.get_page(
|
||||
limit=limit,
|
||||
archived=False,
|
||||
after=after
|
||||
)
|
||||
|
||||
# レスポンスから companies の配列を追加
|
||||
if response.results:
|
||||
all_owners.extend([c.to_dict() for c in response.results])
|
||||
|
||||
# 次ページがない場合はループ終了
|
||||
paging = response.paging
|
||||
if not paging or not paging.next or not paging.next.after:
|
||||
break
|
||||
|
||||
# next.after をセットして次ループへ
|
||||
after = paging.next.after
|
||||
|
||||
return all_owners
|
||||
|
||||
def upload_to_gcs(data):
|
||||
"""
|
||||
メモリ上の CSV データを Cloud Storage にアップロード
|
||||
"""
|
||||
bucket = cs_client.bucket(os.getenv("BUCKET"))
|
||||
blob = bucket.blob(os.getenv("OBJECT"))
|
||||
blob.upload_from_string(data.getvalue(), content_type='text/csv')
|
||||
|
||||
#
|
||||
# SecretManagerからアクセストークンを取得
|
||||
#
|
||||
def get_access_key():
|
||||
key_path = os.getenv('KEY_PATH') + "/versions/1"
|
||||
# アクセストークン取得
|
||||
response = sm_client.access_secret_version(name=key_path)
|
||||
# アクセストークンをデコード
|
||||
access_token = response.payload.data.decode("UTF-8")
|
||||
return access_token
|
||||
|
|
@@ -1,5 +0,0 @@
functions-framework==3.*
Flask
google-cloud-storage
google-cloud-secret-manager
hubspot-api-client
@@ -1,3 +0,0 @@
MIITEL_URL=https://datacom.miitel.jp/
PROJECT_ID=datacom-poc
MODEL_ID=gemini-2.5-flash
@@ -1,3 +0,0 @@
MIITEL_URL: https://datacom.miitel.jp/
PROJECT_ID: datacom-poc
MODEL_ID: gemini-2.5-flash
@@ -1,3 +0,0 @@
MIITEL_URL: https://datacom.miitel.jp/
PROJECT_ID: rational-timing-443808-u0
MODEL_ID: gemini-2.5-flash
@@ -1,35 +0,0 @@
#!/bin/bash

# Project ID
PROJECT_ID="datacom-poc"

# Name of the function to deploy
FUNCTION_NAME="mrt-generate-meeting-minutes"

# Function entry point
ENTRY_POINT="handle_request"

# Runtime
RUNTIME="python312"

# Region
REGION="asia-northeast1"

# Environment variables file
ENV_VARS_FILE=".env_dev"

gcloud auth application-default set-quota-project $PROJECT_ID
gcloud config set project $PROJECT_ID

# Deploy command
gcloud functions deploy $FUNCTION_NAME \
--gen2 \
--region $REGION \
--runtime $RUNTIME \
--source=./source \
--trigger-http \
--cpu=0.5 \
--memory=1Gi \
--no-allow-unauthenticated \
--entry-point $ENTRY_POINT \
--env-vars-file $ENV_VARS_FILE
@ -1,132 +0,0 @@
|
|||
import functions_framework
|
||||
import vertexai
|
||||
from vertexai.generative_models import GenerativeModel, ChatSession
|
||||
from google.cloud import storage
|
||||
from google.cloud import secretmanager
|
||||
import json
|
||||
import requests
|
||||
import os
|
||||
from datetime import datetime, timezone, timedelta
|
||||
import gzip
|
||||
|
||||
|
||||
# Storage クライアントを作成
|
||||
storage_client = storage.Client()
|
||||
sm_client = secretmanager.SecretManagerServiceClient()
|
||||
|
||||
@functions_framework.http
|
||||
def handle_request(request):
|
||||
# POSTリクエストの処理
|
||||
if request.method != 'POST':
|
||||
return ({'error': 'Method not allowed'}, 405, {'Content-Type': 'application/json'})
|
||||
try:
|
||||
request_json = request.get_json()
|
||||
print(request_json)
|
||||
|
||||
project_id = os.getenv("PROJECT_ID")
|
||||
miitel_url = os.getenv("MIITEL_URL")
|
||||
|
||||
video_info = request_json["video"]
|
||||
|
||||
access_permission = video_info["access_permission"]
|
||||
video_id = video_info["id"] # 会議履歴ID
|
||||
host_name = video_info["host"]["user_name"] # ホストユーザー名
|
||||
host_id = video_info["host"]["login_id"] # ホストユーザーID
|
||||
starts_at = video_info["starts_at"] # 開始日時
|
||||
ends_at = video_info["ends_at"] # 終了日時
|
||||
|
||||
video_url = miitel_url + "app/video/" + video_id # 会議履歴URL
|
||||
title = video_info["title"] # 会議タイトル
|
||||
print("会議タイトル",title)
|
||||
|
||||
# 閲覧制限のない会議のみ生成
|
||||
if access_permission != "EVERYONE":
|
||||
return (json.dumps({"status": "end"}, ensure_ascii=False), 200, {"Content-Type": "application/json"})
|
||||
|
||||
# 社外ミーティングのみ議事録作成
|
||||
if "様" not in title or "社内" in title:
|
||||
return (json.dumps({"status": "end"}, ensure_ascii=False), 200, {"Content-Type": "application/json"})
|
||||
|
||||
# 議事録ファイル名
|
||||
jst_date_str = generate_jst_date(starts_at) # 開始日時をJSTに変換
|
||||
file_name = f"{jst_date_str} {title} {host_name}"
|
||||
print(file_name)
|
||||
# 議事録作成
|
||||
speech_recognition = video_info["speech_recognition"]["raw"] # 文字起こしデータ
|
||||
minutes_text = create_minutes(project_id,speech_recognition)
|
||||
print("議事録作成完了")
|
||||
|
||||
# テキスト内容をセット
|
||||
minutes = f"会議履歴URL:{video_url}\n"
|
||||
minutes += f"担当者:{host_name}\n\n"
|
||||
minutes += minutes_text
|
||||
|
||||
response_data = {
|
||||
"status": "next", # ステータス
|
||||
"title": title, # 会議タイトル
|
||||
"host_id": host_id, # ホストユーザーID
|
||||
"host_name": host_name, # ホストユーザー名
|
||||
"video_url": video_url, # 会議履歴URL
|
||||
"starts_at": starts_at, # 開始日時
|
||||
"ends_at": ends_at, # 終了日時
|
||||
"file_name": file_name, # 議事録ファイル名
|
||||
"minutes": minutes, # 議事録内容
|
||||
}
|
||||
|
||||
return (json.dumps(response_data, ensure_ascii=False), 200, {"Content-Type": "application/json"})
|
||||
except Exception as e:
|
||||
# エラー
|
||||
error_response = {
|
||||
"error": str(e) #エラー内容
|
||||
}
|
||||
print(str(e))
|
||||
return json.dumps(error_response), 500, {'Content-Type': 'application/json'} #エラー
|
||||
|
||||
|
||||
def generate_jst_date(starts_at):
|
||||
|
||||
# UTCの文字列をdatetimeオブジェクトに変換
|
||||
utc_datetime = datetime.fromisoformat(starts_at)
|
||||
|
||||
# JSTへの変換
|
||||
jst_timezone = timezone(timedelta(hours=9)) # JSTはUTC+9
|
||||
jst_datetime = utc_datetime.astimezone(jst_timezone)
|
||||
|
||||
# yyyy-MM-dd形式にフォーマット
|
||||
jst_date_str = jst_datetime.strftime("%Y年%m月%d日")
|
||||
return jst_date_str
|
||||
|
||||
|
||||
def create_minutes(project_id,speech_recognition):
|
||||
location = "us-central1"
|
||||
model_id = os.getenv("MODEL_ID")
|
||||
# print("モデルID:", model_id)
|
||||
|
||||
vertexai.init(project=project_id, location=location)
|
||||
model = GenerativeModel(model_id)
|
||||
# print("モデル初期化完了")
|
||||
|
||||
prompt = f"""
|
||||
あなたは議事録作成のプロフェッショナルです。以下の「文字起こし結果」は営業マンが録音した商談の文字起こしです。以下の制約条件に従い、最高の商談報告の議事録を作成してください。
|
||||
|
||||
制約条件:
|
||||
1. 文字起こし結果にはAIによる書き起こしミスがある可能性を考慮してください。
|
||||
2. 冒頭に主要な「決定事項」と「アクションアイテム」をまとめてください。
|
||||
3. 議論のポイントを議題ごとに要約してください。
|
||||
4. 見出しや箇条書きを用いて、情報が探しやすい構造で簡潔かつ明瞭に記述してください。
|
||||
5. 要約は500文字以内に収めてください。
|
||||
6. 箇条書き形式で簡潔にまとめてください。
|
||||
7. マークダウン記法は使わず、各項目を「■」や「・」等を使って見やすくしてください。
|
||||
|
||||
文字起こし結果:
|
||||
{speech_recognition}
|
||||
"""
|
||||
|
||||
|
||||
# print("-------------プロンプト-------------")
|
||||
# print(prompt[:1000])
|
||||
# print("-------------議事録作成-------------")
|
||||
response = model.generate_content(prompt)
|
||||
# print(response.text)
|
||||
return response.text
|
||||
|
||||
|
|
@@ -1,5 +0,0 @@
functions-framework==3.*
google-cloud-storage
google-cloud-aiplatform
google-cloud-secret-manager
pydrive2
BIN functions/generate_minutes/.DS_Store (vendored, new file): Binary file not shown.
@@ -5,12 +5,12 @@
  "scripts": {
    "build": "tsc",
    "start": "npm run build && functions-framework --target=helloHttp --port=8080 --source=dist/index.js",
    "dev": "dotenv -e .env_dev -- nodemon --watch . --exec \"functions-framework --target=helloHttp --port=8080\"",
    "debug": "dotenv -e .env_dev -- node --inspect node_modules/.bin/functions-framework --source=dist/index.js --target=helloHttp",
    "watch": "concurrently \"dotenv -e .env_dev -- npm run build -- --watch\" \"dotenv -e .env_dev -- nodemon --watch ./dist/ --exec npm run debug\""
  },
  "devDependencies": {
    "@google-cloud/functions-framework": "^3.0.0",
    "@types/archiver": "^7.0.0",
    "@types/express": "^4.17.0",
    "@types/node": "^20.0.0",
    "dotenv-cli": "^11.0.0",
@@ -19,12 +19,14 @@
    "typescript": "^5.0.0"
  },
  "dependencies": {
    "@google-cloud/local-auth": "^2.1.0",
    "@google-cloud/storage": "^7.17.3",
    "@google/genai": "^1.30.0",
    "@hubspot/api-client": "^13.4.0",
    "archiver": "^7.0.1",
    "concurrently": "^9.2.1",
    "dotenv": "^17.2.3",
    "express": "^4.21.2",
    "fast-fuzzy": "^1.12.0",
    "googleapis": "^105.0.0",
    "zod": "^4.1.13"
  }
24 functions/generate_minutes/serverConfig.ts (new file)
@@ -0,0 +1,24 @@
import { join } from "path";

export const GEMINI_MODEL_ID = "gemini-2.5-flash";
export const DEBUG = true;

export const CREDENTIALS_PATH = join(__dirname, process.env.SEARVICE_ACCOUNT_CREDENTIALS_FILE || '');

export const CLOUD_STORAGE_MASTER_FOLDER_NAME = "master";
export const CLOUD_STORAGE_LOG_FOLDER_NAME = "request_logs";
export const COMPANIES_FILE_NAME = "companies.json";
export const OWNERS_FILE_NAME = "owners.json";

export const LEGAL_SUFFIX = /(株式会社|(株)|\(株\)|有限会社|合同会社|Inc\.?|Corp\.?|Co\.?Ltd\.?)/;

export const Y_FORMAT = 'yyyy';
export const YM_FORMAT = 'yyyyMM'
export const DATETIME_FORMAT = 'yyyy-MM-dd hh:mm:ss';
export const DATE_FORMAT = 'yyyy年MM月dd日';

export const FOLDER_MIMETYPE = 'application/vnd.google-apps.folder';
export const DOCUMENT_MIMETYPE = 'application/vnd.google-apps.document';
export const SHEET_MIMETYPE = 'application/vnd.google-apps.spreadsheet';

export const LOG_SHEET_HEADER_VALUES = ["タイムスタンプ", "商談日", "タイトル", "登録先企業", "担当者", "ミーティングURL", "議事録URL", "HubSpot会社概要URL"]
@ -1,28 +1,56 @@
|
|||
import express from "express";
|
||||
import zlib from "zlib";
|
||||
import { storageController } from "./logics/storage";
|
||||
import { MiiTelWebhookSchema, processRequest } from "./logics/process";
|
||||
import { hubspotController } from "./logics/hubspot";
|
||||
import { createCustomError } from "./logics/error";
|
||||
import { CLOUD_STORAGE_LOG_FOLDER_NAME, CLOUD_STORAGE_MASTER_FOLDER_NAME, COMPANIES_FILE_NAME, OWNERS_FILE_NAME } from "../serverConfig";
|
||||
|
||||
const router = express.Router();
|
||||
|
||||
router.get("/hello", (req, res) => res.send("こんにちは!"));
|
||||
// Process Request From Miitel Webhook
|
||||
router.post("/miitel", async (req, res) => {
|
||||
try {
|
||||
const body = req.body;
|
||||
const parsedBody = MiiTelWebhookSchema.safeParse(body);
|
||||
if (!parsedBody.success) throw createCustomError("ZOD_FAILED");
|
||||
|
||||
router.post("/miitel", async(req, res) => {
|
||||
const body = req.body;
|
||||
// await storageController.saveToGCS("request_log",'test', JSON.stringify(req.body));
|
||||
const videoInfo = parsedBody.data.video;
|
||||
const gzipped = zlib.gzipSync(JSON.stringify(body));
|
||||
await storageController.saveToGCS(CLOUD_STORAGE_LOG_FOLDER_NAME, `${videoInfo.id}.json.gz`, gzipped, 'application/gzip');
|
||||
|
||||
const parsedBody = MiiTelWebhookSchema.safeParse(body);
|
||||
if(!parsedBody.success) {
|
||||
console.error("Invalid webhook body:", parsedBody.error);
|
||||
return;
|
||||
await processRequest(videoInfo);
|
||||
|
||||
res.status(200).send("ok");
|
||||
} catch(err) {
|
||||
res.status(400).send("Invalid webhook body");
|
||||
}
|
||||
console.log("miitel webhook received:", parsedBody.data.video.id);
|
||||
|
||||
await processRequest(parsedBody.data.video);
|
||||
|
||||
res.send("こんにちは!");
|
||||
});
|
||||
|
||||
router.post("/getLog", async(req, res) => {
|
||||
// Update Master Data And Check Google Drive Folder
|
||||
router.post("/dailyBatch", async (req, res) => {
|
||||
try {
|
||||
console.log("Starting daily batch process...");
|
||||
// export companies to GCS
|
||||
const companies = await hubspotController.getCompanies();
|
||||
if(!companies) throw createCustomError("GET_OWNERS_FAILED");
|
||||
await storageController.saveToGCS(CLOUD_STORAGE_MASTER_FOLDER_NAME, COMPANIES_FILE_NAME, JSON.stringify(companies), 'application/json');
|
||||
|
||||
// export owners to GCS
|
||||
const owners = await hubspotController.getOwners();
|
||||
if(!owners) throw createCustomError("GET_COMPANIES_FAILED");
|
||||
await storageController.saveToGCS(CLOUD_STORAGE_MASTER_FOLDER_NAME, OWNERS_FILE_NAME, JSON.stringify(owners), 'application/json');
|
||||
|
||||
// check folders in Google Drive
|
||||
res.status(200).send("Daily batch executed.");
|
||||
|
||||
} catch (error) {
|
||||
console.error("Error in daily batch:", error);
|
||||
}
|
||||
});
|
||||
|
||||
// Check Log By Meeting ID
|
||||
router.post("/getLog", async (req, res) => {
|
||||
console.log(req.body);
|
||||
const meetingId = req.body.meetingId;
|
||||
const exist = await storageController.existsInGCS("request_log", "test.json.gz");
|
||||
|
|
@ -32,4 +60,33 @@ router.post("/getLog", async(req, res) => {
|
|||
res.send(log);
|
||||
});
|
||||
|
||||
|
||||
|
||||
// router.post("/deleteFile", async (req, res) => {
|
||||
// console.log(req.body);
|
||||
// const fileId = req.body.fileId;
|
||||
// const googleAuth = await googleDriveController.getAuth();
|
||||
// const driveClilent = googleDriveController.getDriveClient(googleAuth);
|
||||
// await googleDriveController.deleteFile(driveClilent, fileId);
|
||||
// res.send('ok');
|
||||
// });
|
||||
|
||||
// router.post("/test", async (req, res) => {
|
||||
// try {
|
||||
|
||||
// const googleAuth = await googleDriveController.getAuth();
|
||||
// const driveClilent = googleDriveController.getDriveClient(googleAuth);
|
||||
// const sheetsClient = googleDriveController.getSheetsClient(googleAuth);
|
||||
// const folderId = await googleDriveController.searchFileIdByFileName(driveClilent, MINUTES_CREATION_HISTORY_FOLDER_ID, '2025');
|
||||
// if(!folderId) throw new Error()
|
||||
// // console.log(fileId);
|
||||
// // const sheetId = await googleDriveController.getLogSheetId(driveClilent, sheetsClient, folderId, 'test1');
|
||||
// // console.log('sheet id : ', sheetId);
|
||||
// res.send("ok");
|
||||
// } catch (error) {
|
||||
// console.error("Error in /test endpoint:", error);
|
||||
// res.status(500).send("Error in /test endpoint");
|
||||
// }
|
||||
// });
|
||||
|
||||
export default router;
|
||||
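For reference, a minimal sketch of the /miitel webhook flow as it reads after this change; the ordering and error handling follow the added lines above, anything beyond that is assumed.

router.post("/miitel", async (req, res) => {
  try {
    // Validate the webhook payload with zod; reject anything that does not match the schema.
    const parsedBody = MiiTelWebhookSchema.safeParse(req.body);
    if (!parsedBody.success) throw createCustomError("ZOD_FAILED");
    const videoInfo = parsedBody.data.video;

    // Archive the raw payload to Cloud Storage as <video id>.json.gz.
    const gzipped = zlib.gzipSync(JSON.stringify(req.body));
    await storageController.saveToGCS(CLOUD_STORAGE_LOG_FOLDER_NAME, `${videoInfo.id}.json.gz`, gzipped, 'application/gzip');

    // Generate the minutes and register them downstream.
    await processRequest(videoInfo);
    res.status(200).send("ok");
  } catch (err) {
    res.status(400).send("Invalid webhook body");
  }
});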
|
|
@@ -21,5 +21,12 @@ export const dateController = {
    return formatted.replace(/(y+)/g, (v) =>
      date.getFullYear().toString().slice(-v.length)
    );
    }
  },
  getCurrentJstTime: (format: string) => {
    const utcDate = new Date().toUTCString();
    const jstDate = dateController.convertToJst(utcDate);
    const jstStr = dateController.getFormattedDate(jstDate, format);
    return jstStr;
    // return dateController.getFormattedDate(utcDate, "yyyy/MM/dd hh:mm:ss");
  },
};
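A short usage sketch of the new getCurrentJstTime helper, assuming the format constants exported from serverConfig.ts above; the sample outputs are illustrative only.

import { DATETIME_FORMAT, YM_FORMAT } from "../../serverConfig";
import { dateController } from "./date";

// Current time in JST, rendered with the project's format strings.
const timestamp = dateController.getCurrentJstTime(DATETIME_FORMAT); // e.g. "2025-01-31 09:15:00"
const yearMonth = dateController.getCurrentJstTime(YM_FORMAT);       // e.g. "202501", used to name the monthly log sheet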
29 functions/generate_minutes/src/logics/error.ts (new file)
@@ -0,0 +1,29 @@
import { Response } from "express";
import z from "zod";
import { ERROR_DEFINITIONS, ErrorKey } from "../stores/errorCodes";

const CustomErrorSchema = z.object({
  code: z.string(),
  message: z.string(),
  statusCode: z.number(),
});

export type CustomError = z.infer<typeof CustomErrorSchema>;

export const createCustomError = (key: ErrorKey): CustomError => {
  const errorInfo = ERROR_DEFINITIONS[key];
  return CustomErrorSchema.parse(errorInfo);
};

export const responseError = (error: any, res: Response | null = null) => {
  if (!CustomErrorSchema.safeParse(error).success) {
    console.error(error);
    console.error("========== Unknown Error ==========");
    if (res) return res.status(500).send('Internal Server Error');
  }
  const parsedError = CustomErrorSchema.parse(error);
  console.error("========== Custom Error ==========");
  console.error(`Error Code: ${parsedError.code}\n Message: ${parsedError.message}`);
  if (res) return res.status(parsedError.statusCode).send(parsedError.message);
}
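A hedged usage sketch of these helpers inside an Express route; "ZOD_FAILED" is an ErrorKey that already appears in this commit, everything else is illustrative.

import express from "express";
import { createCustomError, responseError } from "./logics/error";

const router = express.Router();

router.post("/example", async (req, res) => {
  try {
    // Throw a typed CustomError defined in ERROR_DEFINITIONS.
    if (!req.body) throw createCustomError("ZOD_FAILED");
    res.status(200).send("ok");
  } catch (err) {
    // Logs the error and responds with its statusCode, or 500 for unknown errors.
    responseError(err, res);
  }
});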
53 functions/generate_minutes/src/logics/file.ts (new file)
@@ -0,0 +1,53 @@
import { create } from "domain";
import { dateController } from "./date";
import path, { join } from "path";
import archiver from "archiver";
import { googleDriveController } from "./googleDrive";
import fs from "fs";


export const fileController = {
  createMinutesFileName: (title: string, hostName: string, jstStartsAt: Date): string => {
    const dateStr = dateController.getFormattedDate(jstStartsAt, "yyyy年MM月dd日");
    const fileName = `${dateStr} ${title} ${hostName}`;
    return fileName;
  },
  extractCompanyNameFromTitle: (title: string) => {
    const normalizedTitle = title.replace("【", "").replace("】", "");
    const companyName = normalizedTitle.split("様")[0];
    return companyName
  },
  createMinutesContent: (videoUrl: string, hostName: string, minutes: string): string => {
    let minutesContent = `会議履歴URL:${videoUrl}\n`;
    minutesContent += `担当者:${hostName}\n\n`;
    minutesContent += minutes;
    return minutesContent;
  },
  createZip: async (body: any, outputPath: string, fileName: string) => {
    console.log(outputPath);
    await new Promise((resolve, reject) => {
      const output = fs.createWriteStream(outputPath);
      const archive = archiver('zip', {
        zlib: { level: 9 }
      });

      output.on('close', () => {
        console.log(archive.pointer() + ' total bytes');
        console.log('archiver has been finalized and the output file descriptor has closed.');
        resolve(true);
      });

      archive.on('error', (err) => {
        reject(err);
      });

      archive.pipe(output);
      archive.append(JSON.stringify(body), { name: fileName + '.json' });
      archive.finalize();
    })
    console.log("ZIP created");
    return;
  },

};
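A sketch of how createZip might be combined with the Drive upload defined in googleDrive.ts further below; the folder ID is a placeholder and the /tmp path is an assumption about the Cloud Functions filesystem.

import { join } from "path";
import { fileController } from "./file";
import { googleDriveController } from "./googleDrive";

const archiveWebhookBody = async (body: any, videoId: string, folderId: string) => {
  const zipPath = join("/tmp", `${videoId}.zip`);            // Cloud Functions allows writes under /tmp only
  await fileController.createZip(body, zipPath, videoId);    // stores <videoId>.json inside the zip
  const auth = await googleDriveController.getAuth();
  const drive = googleDriveController.getDriveClient(auth);
  // uploadFile also removes the local file once the upload finishes (or fails).
  await googleDriveController.uploadFile(drive, zipPath, folderId, `${videoId}.zip`);
};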
62 functions/generate_minutes/src/logics/fuzzyMatch.ts (new file)
@@ -0,0 +1,62 @@
import { search } from "fast-fuzzy";
import { storageController } from "./storage";
import { CLOUD_STORAGE_MASTER_FOLDER_NAME, COMPANIES_FILE_NAME, LEGAL_SUFFIX } from "../../serverConfig";
import { Company, CompanySchema } from "./hubspot";
import z from "zod";


export const fuzzyMatchController = {
  searchMatchedCompany: async (companyName: string): Promise<Company | null> => {
    try {
      const companiesJson = await storageController.loadJsonFromGCS(CLOUD_STORAGE_MASTER_FOLDER_NAME, COMPANIES_FILE_NAME);
      if (!companiesJson) return null;
      const parsedCompanies = z.array(CompanySchema).safeParse(JSON.parse(companiesJson));
      if (!parsedCompanies.success) return null;

      const normalizedCompanyName = fuzzyMatchController.normalizeCompanyName(companyName);
      const normalizedCompanies: Company[] = parsedCompanies.data.map((c) => CompanySchema.parse({
        id: c.id,
        name: fuzzyMatchController.normalizeCompanyName(c.name),
      }));

      // Exact Match
      const exactMatchedCompany = fuzzyMatchController.searchExactMatchedCompany(normalizedCompanyName, normalizedCompanies);
      // console.log(exactMatchedCompanyId);
      if (exactMatchedCompany) return exactMatchedCompany;

      // Fuzzy Match
      const results = search(
        fuzzyMatchController.normalizeCompanyName(companyName),
        parsedCompanies.data,
        {
          keySelector: (obj) => fuzzyMatchController.normalizeCompanyName(obj.name),
          returnMatchData: true,
          threshold: 0.8,
        },
      );
      console.log("===== Search Results =====");
      console.log(results);
      if (results.length <= 0) return null;
      if (results.length === 1) return results[0].item;
      if (results.length > 1) {
        // multiple candidates share the same top score
        if (results[0].score === results[1].score) return null;
        // only when the top hit is a clear winner
        return results[0].item;
      }
      return null;
    } catch (error) {
      console.error(error);
      return null;
    }
  },
  normalizeCompanyName: (companyName: string) => {
    return companyName.replace(LEGAL_SUFFIX, '');
  },
  searchExactMatchedCompany: (companyName: string, companies: Company[]): Company | null => {
    for (const company of companies) {
      if (companyName === company.name) return company;
    };
    return null;
  },
};
|
@ -1,36 +1,226 @@
|
|||
import { authenticate } from "@google-cloud/local-auth";
import { JSONClient } from "google-auth-library/build/src/auth/googleauth";
import { google } from "googleapis";
import path from "path";
import { docs_v1, drive_v3, google, sheets_v4 } from "googleapis";
import fs from "fs";
import { CREDENTIALS_PATH, DEBUG, FOLDER_MIMETYPE, LOG_SHEET_HEADER_VALUES, SHEET_MIMETYPE } from "../../serverConfig";
import z from "zod";

const SCOPES = ["https://www.googleapis.com/auth/drive", "https://www.googleapis.com/auth/drive.file"]
const CREDENTIALS_PATH = path.join(process.cwd(), 'credentials.json');
const MAX_RETRY = 3;

export const LogRowDataSchema = z.object({
  timestamp: z.string(),
  meetingDate: z.string(),
  title: z.string(),
  matchedCompanyName: z.string(),
  ownerName: z.string(),
  meetingUrl: z.string(),
  documentUrl: z.string(),
  hubspotUrl: z.string(),
});

export type LogRowData = z.infer<typeof LogRowDataSchema>

export const googleDriveController = {
  getAuth: async():Promise<any> => {
    const auth = await new google.auth.GoogleAuth({
      keyFile: CREDENTIALS_PATH,
      scopes: SCOPES,
    });
    return auth;
  getAuth: async (): Promise<any> => {
    try {
      const credentials = JSON.parse(process.env.SEARVICE_ACCOUNT_CREDENTIALS || "{}");
      console.log(credentials)
      const auth = await new google.auth.GoogleAuth({
        credentials: credentials,
        scopes: SCOPES,
      });
      if (!auth) return null;
      return auth;
    } catch (error) {
      console.error("Error obtaining Google Auth:", error);
      return null;
    }
  },
  checkConnection: async() => {
    const auth = await googleDriveController.getAuth();
  getDriveClient: (auth: any): drive_v3.Drive => {
    // console.log("Google Drive client authenticated.");
    const drive = google.drive({ version: "v3", auth: auth});
    const folder = '1cCDJKusfrlDrJe2yHCR8pCHJXRqX-4Hw';
    const res = await drive.files.list({
      q: `'${folder}' in parents`,
    const drive = google.drive({ version: "v3", auth: auth });
    return drive;
  },
  getSheetsClient: (auth: any): sheets_v4.Sheets => {
    const sheets = google.sheets({ version: "v4", auth: auth });
    return sheets;
  },
  getDocsClient: (auth: any): docs_v1.Docs => {
    const docs = google.docs({ version: "v1", auth: auth });
    return docs;
  },

  uploadFile: async (driveClient: drive_v3.Drive, filePath: string, folderId: string, fileName: string): Promise<any> => {
    try {
      console.log("Uploading file to Google Drive:", filePath);
      const response = await driveClient.files.create({
        requestBody: {
          name: fileName,
          parents: [folderId],
        },
        media: {
          mimeType: "application/zip",
          body: fs.createReadStream(filePath),
        },
      });
      console.log("File uploaded, Id:", response.data.id);
      fs.unlinkSync(filePath);
      return response.data.id;
    } catch (error) {
      console.error("Error uploading file:", error);
      fs.unlinkSync(filePath);
      return null;
    }
  },
  getFolderId: async (driveClient: drive_v3.Drive, folderId: string, fileName: string): Promise<string | null> => {
    try {
      const existsFolderId = await googleDriveController.searchFileIdByFileName(driveClient, folderId, fileName);
      if(existsFolderId) return existsFolderId;
      console.log('=== Create New Folder ===')
      const newFolderId = googleDriveController.createNewFile(driveClient, folderId, fileName, FOLDER_MIMETYPE);
      if(!newFolderId) return null;
      return newFolderId;
    } catch (error) {
      console.error('Error searching files:', error);
      return null;
    }
  },
  searchFileIdByFileName: async (driveClient: drive_v3.Drive, folderId: string, fileName: string): Promise<string | null> => {
    try {
      const params = googleDriveController.getSearchFileParamsByDebugMode(folderId);
      const res = await driveClient.files.list(params);
      console.log("Files:");
      console.log(res.data.files);
      if(!res.data.files) return null;

      for(const file of res.data.files) {
        if(fileName === file.name) {
          if(!file.id) return null;
          return file.id;
        }
      }
      return null;
    } catch (error) {
      console.error('Error searching files:', error);
      return null;
    }
  },
  getSearchFileParamsByDebugMode: (folderId: string): drive_v3.Params$Resource$Files$List => {
    if(DEBUG) {
      return {
        corpora: 'user',
        q: `'${folderId}' in parents`,
        pageSize: 10,
        fields: "files(id, name)",
        includeItemsFromAllDrives: true,
        includeTeamDriveItems: true,
        supportsAllDrives: true
      }
    }
    return {
      corpora: 'drive',
      driveId: process.env.GOOGLE_DRIVE_FOLDER_ID,
      q: `'${folderId}' in parents`,
      pageSize: 10,
      fields: "files(id, name)",
    });
    console.log("Files:");
    console.log(res.data.files);
      includeItemsFromAllDrives: true,
      includeTeamDriveItems: true,
      supportsAllDrives: true
    }
  },
  uploadFile: async() => {
  createNewFile: async (driveClient: drive_v3.Drive, folderId: string, fileName: string, mimeType: string): Promise<string | null> => {
    try {
      const requestBody = {
        name: fileName,
        parents: [folderId], // ID of the parent folder to create the file in
        mimeType: mimeType,
      };

      const file = await driveClient.files.create({
        requestBody,
        // fields: 'id',
      });

      console.log('File Id:', file.data);
      if (!file.data.id) return null;
      return file.data.id;
    } catch (error) {
      console.error('Error creating file:', error);
      return null;
    }
  },
  // CAUTION
  deleteFile: async (driveClient: drive_v3.Drive, fileId: string) => {
    try {
      const body = { trashed: true }
      const response = await driveClient.files.update({
        fileId: fileId,
        requestBody: body,
      });
      console.log('File deleted:', response.data);
    } catch (error) {
      console.error('Error deleting file:', error);
    }
  },
  addContentToDocs: async (docsClient: docs_v1.Docs, documentId: string, content: string): Promise<boolean> => {
    try {
      const requestBody: docs_v1.Schema$BatchUpdateDocumentRequest = {
        requests: [
          {
            insertText: {
              text: content,
              location: {
                index: 1,
              }
            }
          }
        ]
      };
      const response = await docsClient.documents.batchUpdate({
        documentId: documentId,
        requestBody: requestBody,
      });
      console.log('Content added to document:', response.data);
      return true;
    } catch (error) {
      console.error('Error adding content to document:', error);
      return false;
    }
  },
  createNewFile: async() => {

  getLogSheetId: async (driveClient: drive_v3.Drive, sheetsClient: sheets_v4.Sheets, folderId: string, fileName: string): Promise<string | null> => {
    try {
      const existsSheetId = await googleDriveController.searchFileIdByFileName(driveClient, folderId, fileName);
      if(existsSheetId) return existsSheetId;
      console.log('=== Create New Sheet ===')
      const newSheetId = await googleDriveController.createNewFile(driveClient, folderId, fileName, SHEET_MIMETYPE);
      if(!newSheetId) return null;
      //
      await googleDriveController.insertRowToSheet(sheetsClient, newSheetId, ['※シート名変更厳禁']);
      await googleDriveController.insertRowToSheet(sheetsClient, newSheetId, LOG_SHEET_HEADER_VALUES);
      return newSheetId;
    } catch (error) {
      console.error('Error searching files:', error);
      return null;
    }
  },

  insertRowToSheet: async (sheetsClient: sheets_v4.Sheets, sheetId: string, rowData: string[] ): Promise<boolean> => {
    try {
      const body = {
        values: [rowData]
      }
      const params: sheets_v4.Params$Resource$Spreadsheets$Values$Append = {
        spreadsheetId: sheetId,
        range: 'Sheet1',
        valueInputOption: 'USER_ENTERED',
        insertDataOption: 'INSERT_ROWS',
        requestBody: body,
      }
      await sheetsClient.spreadsheets.values.append(params);
      return true;
    } catch (error) {
      console.log(error);
      return false;
    }
  },
};
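A rough sketch of how these clients are meant to be combined; the folder ID, year and sheet name below are placeholders, not values from this repository:

import { googleDriveController, LogRowDataSchema } from "./logics/googleDrive";

// Hedged sketch: create (or reuse) a year folder and a month sheet, then append one row.
const appendSampleLog = async () => {
  const auth = await googleDriveController.getAuth();
  if (!auth) return;
  const drive = googleDriveController.getDriveClient(auth);
  const sheets = googleDriveController.getSheetsClient(auth);

  // "HISTORY_FOLDER_ID" stands in for the minutes-history Drive folder ID.
  const yearFolderId = await googleDriveController.getFolderId(drive, "HISTORY_FOLDER_ID", "2025");
  if (!yearFolderId) return;
  const sheetId = await googleDriveController.getLogSheetId(drive, sheets, yearFolderId, "2025-01");
  if (!sheetId) return;

  const row = LogRowDataSchema.parse({
    timestamp: "2025/01/01 10:00:00",
    meetingDate: "2025/01/01",
    title: "Sample meeting",
    matchedCompanyName: "",
    ownerName: "Sample Owner",
    meetingUrl: "",
    documentUrl: "",
    hubspotUrl: "",
  });
  await googleDriveController.insertRowToSheet(sheets, sheetId, Object.values(row));
};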

@@ -0,0 +1,116 @@
import { Client } from "@hubspot/api-client";
import { AssociationSpecAssociationCategoryEnum } from "@hubspot/api-client/lib/codegen/crm/objects/meetings/models/AssociationSpec";
import { PublicAssociationsForObject } from "@hubspot/api-client/lib/codegen/crm/objects/meetings";
import z, { email } from "zod";

const hubspotClient = new Client({ accessToken: process.env.HUBSPOT_ACCESS_TOKEN });

export const CompanySchema = z.object({
  id: z.string(),
  name: z.string(),
})

export const OwnerSchema = z.object({
  id: z.string(),
  email: z.string().optional().default(''),
});

export type Company = z.infer<typeof CompanySchema>;
export type Owner = z.infer<typeof OwnerSchema>;

export const hubspotController = {
  check: async() => {
    const response = await hubspotClient.crm.companies.getAll();
    console.log(response.length);
  },
  getCompanies: async(): Promise<Company[] | null> => {
    try {
      const allCompanies: Company[] = [];
      const limit = 100;
      let after: string | undefined = undefined;
      for(let i = 0; i < 1000; i++) {
        console.log(`Fetching companies, iteration ${i+1}`);
        const response = await hubspotClient.crm.companies.basicApi.getPage(limit, after);
        // console.log(response.results);
        const companies: Company[] = response.results.map((company) => CompanySchema.parse({
          id: company.id,
          name: company.properties.name,
        }));
        allCompanies.push(...companies);

        if(response.paging && response.paging.next && response.paging.next.after) {
          after = response.paging.next.after;
          continue;
        }
        break;
      }
      return allCompanies;
    } catch (error) {
      return null;
    }
  },
  getOwners: async(): Promise<Owner[] | null> => {
    try {
      const allOwners: Owner[] = [];
      const limit = 100;
      let after: string | undefined = undefined;
      for(let i = 0; i < 1000; i++) {
        console.log(`Fetching owners, iteration ${i+1}`);
        const response = await hubspotClient.crm.owners.ownersApi.getPage(undefined,after,limit);
        // console.log(response.results);

        const owners: Owner[] = response.results.map((owner) => OwnerSchema.parse({
          id: owner.id,
          email: owner.email,
        }));
        allOwners.push(...owners);

        if(response.paging && response.paging.next && response.paging.next.after) {
          after = response.paging.next.after;
          continue;
        }
        break;
      }
      return allOwners;
    } catch (error) {
      console.error("Error fetching owners:", error);
      return null;
    }
  },
  createMeetingLog: async(companyId: string, title: string, userId: string | null, minutes: string, startsAt: string, endsAt: string ): Promise<boolean> => {
    try {
      // Convert line breaks to HTML
      const minutes_html = minutes.replace("\n", "<br>")
      const associations: PublicAssociationsForObject[] = [{
        types: [
          {associationCategory: AssociationSpecAssociationCategoryEnum.HubspotDefined, associationTypeId: 188},
        ],
        to: {id: companyId},
      }];

      const properties = {
        hs_timestamp: startsAt,
        hs_meeting_title: title,
        hubspot_owner_id: userId || '',
        hs_meeting_body: minutes_html,
        hs_meeting_start_time: startsAt,
        hs_meeting_end_time: endsAt,
      }

      const result = await hubspotClient.crm.objects.meetings.basicApi.create({
        associations: associations,
        properties: properties,
      });
      return true;
    } catch (error) {
      console.error("Error creating HubSpot meeting log:", error);
      return false;
    }
  },
  searchOwnerIdByEmail: (email: string, owners: Owner[]): string | null => {
    for(const owner of owners) {
      if(email === owner.email) return owner.id;
    }
    return null;
  },
};
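A short sketch of the master-data refresh these helpers imply; the folder and file names stand in for the CLOUD_STORAGE_MASTER_FOLDER_NAME, COMPANIES_FILE_NAME and OWNERS_FILE_NAME constants in serverConfig, which are not shown in this diff:

import { hubspotController } from "./logics/hubspot";
import { storageController } from "./logics/storage";

// Hedged sketch: cache HubSpot companies and owners in Cloud Storage so the
// webhook path (fuzzyMatch / searchOwnerIdByEmail) can read them without extra API calls.
const refreshHubspotMasters = async (): Promise<boolean> => {
  const companies = await hubspotController.getCompanies();
  const owners = await hubspotController.getOwners();
  if (!companies || !owners) return false;

  // "master", "companies.json" and "owners.json" are placeholder names.
  await storageController.saveToGCS("master", "companies.json", JSON.stringify(companies), "application/json");
  await storageController.saveToGCS("master", "owners.json", JSON.stringify(owners), "application/json");
  return true;
};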

@@ -1,7 +1,15 @@
import z from "zod";
import { aiController } from "./ai";
import { dateController } from "./date";
import { googleDriveController } from "./googleDrive";
import { googleDriveController, LogRowData, LogRowDataSchema } from "./googleDrive";
import { fileController } from "./file";
import path, { join } from "path";
import fs from "fs";
import { createCustomError, responseError } from "./error";
import { storageController } from "./storage";
import { CLOUD_STORAGE_MASTER_FOLDER_NAME, DATE_FORMAT, DATETIME_FORMAT, DOCUMENT_MIMETYPE, OWNERS_FILE_NAME, Y_FORMAT, YM_FORMAT } from "../../serverConfig";
import { hubspotController, OwnerSchema } from "./hubspot";
import { fuzzyMatchController } from "./fuzzyMatch";

const VideoInfoSchema = z.looseObject({
  id: z.string(),

@@ -26,32 +34,101 @@ export const MiiTelWebhookSchema = z.object({

// export type MiiTelWebhook = z.infer<typeof MiiTelWebhookSchema>;

export const processRequest = async(videoInfo: VideoInfo) => {
  const videoId = videoInfo.id;
  const title = videoInfo.title;
  const startsAt = videoInfo.starts_at;
  const endsAt = videoInfo.ends_at;
  const accessPermission = videoInfo.access_permission;

  const host_id = videoInfo.host.login_id;
  const host_name = videoInfo.host.user_name;

  const speechRecognition = videoInfo.speech_recognition.raw;

  console.log(startsAt);
  const jstStartsAt = dateController.convertToJst(startsAt);
  const jstEndsAt = dateController.convertToJst(endsAt);

  googleDriveController.checkConnection();
  // console.log(dateController.getFormattedDate(startsAtJst, "yyyy/MM/dd hh:mm:ss"));
  // console.log(endsAt);
  // console.log("Processing video:", host_id, host_name, title);
  if(accessPermission !== "EVERYONE" || !title.includes("様") || title.includes("社内")) return;

const GOOGLE_DRIVE_FOLDER_ID = process.env.GOOGLE_DRIVE_FOLDER_ID || '';
const MIITEL_REQUEST_LOG_FOLDER_ID = process.env.MIITEL_REQUEST_LOG_FOLDER_ID || '';
const MINUTES_CREATION_HISTORY_FOLDER_ID = process.env.MINUTES_CREATION_HISTORY_FOLDER_ID || '';
const MIITEL_URL = process.env.MIITEL_URL || '';
const HUBSPOT_COMPANY_URL = process.env.HUBSPOT_COMPANY_URL || '';

  // Save Request Log to Google Drive
  // const minute = await aiController.generateMinutes(speechRecognition);
  // console.log(minute);
const FILE_PATH = join(__dirname, "../files/");

};
export const processRequest = async (videoInfo: VideoInfo) => {
  try {
    const videoId = videoInfo.id;
    const title = videoInfo.title;
    const startsAt = videoInfo.starts_at;
    const endsAt = videoInfo.ends_at;
    const accessPermission = videoInfo.access_permission;

    const hostId = videoInfo.host.login_id;
    const hostName = videoInfo.host.user_name;

    const speechRecognition = videoInfo.speech_recognition.raw;

    const jstStartsAt = dateController.convertToJst(startsAt);
    const jstEndsAt = dateController.convertToJst(endsAt);
    const fileName = fileController.createMinutesFileName(title, hostName, jstStartsAt);
    const videoUrl = `${MIITEL_URL}app/video/${videoId}`;

    if (accessPermission !== "EVERYONE" || !title.includes("様") || title.includes("社内")) return;

    //
    const googleAuth = await googleDriveController.getAuth();
    const driveClient = googleDriveController.getDriveClient(googleAuth);
    const docsClient = googleDriveController.getDocsClient(googleAuth);
    const sheetsClient = googleDriveController.getSheetsClient(googleAuth);

    // ===== Save Request Log to Google Drive =====
    if (!fs.existsSync(FILE_PATH)) fs.mkdirSync(FILE_PATH, { recursive: true });
    const outputPath = path.join(FILE_PATH, fileName + '.zip');
    await fileController.createZip(videoInfo, outputPath, fileName);

    const logFileId = await googleDriveController.uploadFile(driveClient, outputPath, MIITEL_REQUEST_LOG_FOLDER_ID, fileName + '.zip');
    if(!logFileId) throw createCustomError("UPLOAD_LOG_FAILED");

    // ===== Generate Minutes =====
    const minutes = await aiController.generateMinutes(speechRecognition);
    console.log(minutes);
    if (!minutes) throw createCustomError("AI_GENERATION_FAILED");
    let content = `会議履歴URL:${videoUrl}\n`;
    content += `担当者:${hostName}\n\n`;
    content += minutes;

    // ===== Upload To Google Drive =====
    const documentId = await googleDriveController.createNewFile(driveClient, GOOGLE_DRIVE_FOLDER_ID, title, DOCUMENT_MIMETYPE);
    if (!documentId) throw createCustomError("UPLOAD_MINUTES_FAILED");
    const result = await googleDriveController.addContentToDocs(docsClient, documentId, minutes);
    if(!result) throw createCustomError("UPLOAD_MINUTES_FAILED");

    // ===== Create Meeting Log at Hubspot =====
    const ownersJson = await storageController.loadJsonFromGCS(CLOUD_STORAGE_MASTER_FOLDER_NAME, OWNERS_FILE_NAME);
    if(!ownersJson) throw createCustomError("GET_OWNERS_FAILED");
    const parsedOwners = z.array(OwnerSchema).safeParse(JSON.parse(ownersJson));
    if(!parsedOwners.success) throw createCustomError("ZOD_FAILED");
    const ownerId = hubspotController.searchOwnerIdByEmail(hostId, parsedOwners.data);

    const companyName = fileController.extractCompanyNameFromTitle(title);
    const matchedCompany = await fuzzyMatchController.searchMatchedCompany(companyName);
    if(matchedCompany) await hubspotController.createMeetingLog(matchedCompany.id, title, ownerId, minutes, startsAt, endsAt);

    // ===== Append Log To SpreadSheet =====
    const currentYear = dateController.getCurrentJstTime(Y_FORMAT);
    const yearFileId = await googleDriveController.getFolderId(driveClient, MINUTES_CREATION_HISTORY_FOLDER_ID, currentYear);
    if(!yearFileId) throw createCustomError("GET_FOLDER_ID_FAILED");

    const currentYearMonth = dateController.getCurrentJstTime(YM_FORMAT);
    const sheetId = await googleDriveController.getLogSheetId(driveClient, sheetsClient, yearFileId, currentYearMonth);
    if(!sheetId) throw createCustomError("GET_SHEET_ID_FAILED");

    const currentJstDateTimeStr = dateController.getCurrentJstTime(DATETIME_FORMAT);
    const currentJstDateStr = dateController.getCurrentJstTime(DATE_FORMAT);
    const rowData: LogRowData = LogRowDataSchema.parse({
      timestamp: currentJstDateTimeStr,
      meetingDate: currentJstDateStr,
      title: title,
      matchedCompanyName: matchedCompany?.name ?? '',
      ownerName: hostName,
      meetingUrl: videoUrl,
      documentUrl: `https://docs.google.com/document/d/${documentId}/edit`,
      hubspotUrl: matchedCompany ? `${HUBSPOT_COMPANY_URL}/${matchedCompany.id}` : '',
    });
    await googleDriveController.insertRowToSheet(sheetsClient, sheetId, Object.values(rowData));
    return;
  } catch (error) {
    responseError(error);
    return;
  }
};
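The Functions Framework entry point that feeds processRequest is not part of this hunk; as a hedged sketch of how it might look, with the handler name, the "video" field on MiiTelWebhookSchema and the import path all assumed from the old Python trigger:

import * as ff from "@google-cloud/functions-framework";
import { MiiTelWebhookSchema, processRequest } from "./logics/request";

// Hedged sketch: answer MiiTel's challenge, validate the payload with zod,
// then hand the video object to processRequest.
ff.http("handleRequest", async (req, res) => {
  if (req.method !== "POST") {
    res.status(405).json({ error: "Method not allowed" });
    return;
  }
  if (req.body && req.body.challenge) {
    res.status(200).send(req.body.challenge);
    return;
  }
  const parsed = MiiTelWebhookSchema.safeParse(req.body);
  if (!parsed.success) {
    res.status(400).json({ error: "Invalid payload" });
    return;
  }
  await processRequest(parsed.data.video); // "video" field assumed from the old Python trigger
  res.status(200).json({});
});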

@@ -1,19 +1,15 @@
import { Storage } from "@google-cloud/storage";
import zlib from "zlib";

const csClient = new Storage({
  projectId: 'datacom-poc',
  }
);
const BUCKET_NAME = "meeting-report-data";
const csClient = new Storage({projectId: process.env.PROJECT_ID});
const BUCKET_NAME = process.env.CLOUD_STORAGE_BUCKET_NAME || '';
const bucket = csClient.bucket(BUCKET_NAME);

export const storageController = {
  saveToGCS: async(folder: string, filename: string, text: string) => {
    const gzipped = zlib.gzipSync(text);
    const file = bucket.file((`${folder}/${filename}.json.gz`));
    await file.save(gzipped, {
      contentType: 'application/gzip',
  saveToGCS: async(folder: string, filename: string, content: any, contentType: string) => {
    const file = bucket.file((`${folder}/${filename}`));
    await file.save(content, {
      contentType: contentType,
    })
  },
  loadFromGCS: async(folder: string, filename: string): Promise<string | null> => {
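The gzip handling the old saveToGCS hard-coded can still be expressed through the new generic signature; a minimal sketch, with the request_log folder and file naming assumed from the removed Python trigger:

import zlib from "zlib";
import { storageController } from "./logics/storage";

// Hedged sketch: gzip a webhook payload and store it under request_log/<videoId>.json.gz.
const saveGzippedRequestLog = async (videoId: string, payload: unknown) => {
  const gzipped = zlib.gzipSync(JSON.stringify(payload));
  await storageController.saveToGCS("request_log", `${videoId}.json.gz`, gzipped, "application/gzip");
};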

@@ -26,6 +22,16 @@ export const storageController = {
      return null;
    }
  },
  loadJsonFromGCS: async(folder: string, filename: string): Promise<string | null> => {
    const file = bucket.file(`${folder}/${filename}`);
    // console.log("loading file:", file.name);
    try {
      const [data] = await file.download();
      return data.toString("utf-8");
    } catch (err: any) {
      return null;
    }
  },
  existsInGCS: async(folder: string, filename: string): Promise<boolean> => {
    const file = bucket.file((`${folder}/${filename}`));
    console.log("checking file:", file.name);

26  functions/generate_minutes/src/stores/errorCodes.ts  Normal file
@@ -0,0 +1,26 @@
// errorDefinitions.ts

export const ERROR_DEFINITIONS = {
  ZOD_FAILED: { code: "E1003", message: "zodのチェックが失敗しました", statusCode: -1 },
  // Failed to upload the log ZIP to Google Drive
  UPLOAD_LOG_FAILED: { code: "E3001", message: "ログファイルのアップロードに失敗しました", statusCode: 500 },

  // AI minutes generation failed
  AI_GENERATION_FAILED: { code: "E2001", message: "AIによる議事録生成に失敗しました", statusCode: 500 },

  // Failed to create/upload the minutes (Google Docs)
  UPLOAD_MINUTES_FAILED: { code: "E3002", message: "議事録のアップロードに失敗しました", statusCode: 500 },

  // Failed to fetch owner / company master data
  GET_OWNERS_FAILED: { code: "E3003", message: "オーナー情報の取得に失敗しました", statusCode: 500 },
  GET_COMPANIES_FAILED: { code: "E3004", message: "会社情報の取得に失敗しました", statusCode: 500 },

  // Failed to fetch the minutes-creation-history spreadsheet
  GET_MINUTES_HISTORY_FAILED: { code: "E3005", message: "議事録作成履歴の取得に失敗しました", statusCode: 500 },

  GET_FOLDER_ID_FAILED: { code: "E3006", message: "フォルダID取得に失敗しました", statusCode: 500 },
  GET_SHEET_ID_FAILED: { code: "E3007", message: "スプレッドシートID取得に失敗しました", statusCode: 500 },
} as const;

export type ErrorKey = keyof typeof ERROR_DEFINITIONS;
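createCustomError and responseError are imported from ./error elsewhere in this commit, but that module is not shown in this diff; one plausible shape over ERROR_DEFINITIONS, purely as a hedged sketch with an assumed import path:

import { ERROR_DEFINITIONS, ErrorKey } from "../stores/errorCodes";

// Hedged sketch: a custom error type keyed by ERROR_DEFINITIONS; the real
// ./error module may differ.
export class CustomError extends Error {
  constructor(public readonly key: ErrorKey) {
    super(ERROR_DEFINITIONS[key].message);
  }
}

export const createCustomError = (key: ErrorKey): CustomError => new CustomError(key);

export const responseError = (error: unknown): void => {
  if (error instanceof CustomError) {
    const def = ERROR_DEFINITIONS[error.key];
    console.error(`[${def.code}] ${def.message}`);
    return;
  }
  console.error(error);
};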

@@ -1,4 +0,0 @@
PROJECT_ID=datacom-poc
LOCATION=asia-northeast1
BUCKET=meeting-report-data
WORKFLOW=mrt-workflow-create-minutes

@@ -1,4 +0,0 @@
PROJECT_ID: datacom-poc
LOCATION: asia-northeast1
BUCKET: meeting-report-data
WORKFLOW: mrt-workflow-create-minutes

@@ -1,4 +0,0 @@
PROJECT_ID: rational-timing-443808-u0
LOCATION: asia-northeast1
BUCKET: meeting-data
WORKFLOW: mrt-workflow-create-minutes

@@ -1,33 +0,0 @@
#!/bin/bash

# Project ID
PROJECT_ID="datacom-poc"

# Function name to deploy
FUNCTION_NAME="mrt-trigger-minutes-workflow-from-miitel"

# Function entry point
ENTRY_POINT="handle_request"

# Runtime
RUNTIME="python312"

# Region
REGION="asia-northeast1"

# Environment variables file
ENV_VARS_FILE=".env_dev"

gcloud auth application-default set-quota-project $PROJECT_ID
gcloud config set project $PROJECT_ID

# Deploy command
gcloud functions deploy $FUNCTION_NAME \
--gen2 \
--region $REGION \
--runtime $RUNTIME \
--source=./source \
--trigger-http \
--no-allow-unauthenticated \
--entry-point $ENTRY_POINT \
--env-vars-file $ENV_VARS_FILE

@@ -1,75 +0,0 @@
import functions_framework
from google.cloud import storage
from google.cloud.workflows import executions_v1
from google.cloud.workflows.executions_v1.types import Execution
import json
import os
import gzip


# Create the Storage and Workflows clients
cs_client = storage.Client()
wf_client = executions_v1.ExecutionsClient()

@functions_framework.http
def handle_request(request):
    # Handle POST requests only
    if request.method != 'POST':
        # Error response for other methods
        return ({'error': 'Method not allowed'}, 405)

    try:
        request_json = request.get_json()
        print(request_json)

        if "challenge" in request_json:
            # Respond to the MiiTel challenge request
            return (request_json["challenge"], 200, {'Content-Type':'text/plain'})

        project_id = os.getenv("PROJECT_ID")
        bucket_name = os.getenv("BUCKET")  # bucket name
        location = os.getenv("LOCATION")  # workflow location
        workflow = os.getenv("WORKFLOW")  # workflow name

        # Save a copy for debugging
        save_to_gcs(bucket_name, request_json)

        # Invoke the workflow
        argument = json.dumps({"video": request_json["video"]})
        execution = Execution(argument=argument)
        parent = f"projects/{project_id}/locations/{location}/workflows/{workflow}"
        print(parent)
        response = wf_client.create_execution(request={"parent": parent, "execution": execution})
        print(f"Workflow execution started: {response.name}")

        return (json.dumps({}), 200, {'Content-Type': 'application/json'})
    except Exception as e:
        # Error response
        error_response = {
            "error": str(e)  # error details
        }
        print(str(e))
        return json.dumps(error_response), 500, {'Content-Type': 'application/json'}


def save_to_gcs(bucket_name, request_json):
    file_name = request_json["video"]["id"] + ".json.gz"

    bucket = cs_client.bucket(bucket_name)

    # Get the blob in the GCS bucket
    blob = bucket.blob(f"request_log/{file_name}")

    # Convert the JSON to a string
    json_string = json.dumps(request_json)

    # Gzip compression
    compressed_data = gzip.compress(json_string.encode('utf-8'))

    # Upload the compressed data
    blob.upload_from_string(compressed_data, content_type='application/gzip')

@@ -1,4 +0,0 @@
functions-framework==3.*
Flask
google-cloud-storage
google-cloud-workflows

@@ -1,2 +0,0 @@
KEY_PATH=projects/570987459910/secrets/sa-create-minutes-key
FOLDER_ID=0AGT_1dSq66qYUk9PVA

@@ -1,2 +0,0 @@
KEY_PATH: projects/32472615575/secrets/sa-access-google-drive-key
FOLDER_ID: 1cCDJKusfrlDrJe2yHCR8pCHJXRqX-4Hw

@@ -1,2 +0,0 @@
KEY_PATH: projects/570987459910/secrets/sa-create-minutes-key
FOLDER_ID: 0AGT_1dSq66qYUk9PVA

@@ -1,33 +0,0 @@
#!/bin/bash

# Project ID
PROJECT_ID="datacom-poc"

# Function name to deploy
FUNCTION_NAME="mrt-create-minutes"

# Function entry point
ENTRY_POINT="handle_request"

# Runtime
RUNTIME="python312"

# Region
REGION="asia-northeast1"

# Environment variables file
ENV_VARS_FILE=".env_dev"

gcloud auth application-default set-quota-project $PROJECT_ID
gcloud config set project $PROJECT_ID

# Deploy command
gcloud functions deploy $FUNCTION_NAME \
--gen2 \
--region $REGION \
--runtime $RUNTIME \
--source=./source \
--trigger-http \
--no-allow-unauthenticated \
--entry-point $ENTRY_POINT \
--env-vars-file $ENV_VARS_FILE

@@ -1,128 +0,0 @@
import functions_framework
from google.cloud import secretmanager
from google.oauth2 import service_account
from googleapiclient.discovery import build
from googleapiclient.errors import HttpError
import json
import os

SCOPES = ["https://www.googleapis.com/auth/drive", "https://www.googleapis.com/auth/drive.file"]

sm_client = secretmanager.SecretManagerServiceClient()

@functions_framework.http
def handle_request(request):
    # Handle POST requests only
    if request.method != 'POST':
        # Error response for other methods
        return ({'error': 'Method not allowed'}, 405)

    try:
        request_json = request.get_json()
        print(request_json)

        folder_id = os.getenv("FOLDER_ID")  # shared drive ID

        file_name = request_json["file_name"]  # meeting title
        minutes = request_json["minutes"]  # meeting minutes

        # Fetch the service account JSON string from Secret Manager
        service_account_info = get_service_account_info()
        # Authenticate
        credentials = get_credentials(service_account_info)

        # Build the API clients
        drive_service = build("drive", "v3", credentials=credentials)
        docs_service = build("docs", "v1", credentials=credentials)

        # Create the file
        document_id = create_new_document(drive_service, folder_id, file_name)
        print(f"Created document with ID: {document_id}")

        # Set the text content
        append_minutes_to_doc(docs_service, document_id, minutes)

        response_data = {
            "document_id": document_id,  # ID of the created document
        }

        return json.dumps(response_data), 200, {"Content-Type": "application/json"}
    except Exception as e:
        # Error response
        error_response = {
            "error": str(e)  # error details
        }
        print(str(e))
        return json.dumps(error_response), 500, {'Content-Type': 'application/json'}


#
# Fetch the service account key from Secret Manager
#
def get_service_account_info():
    key_path = os.getenv('KEY_PATH') + "/versions/1"
    # Fetch the secret
    response = sm_client.access_secret_version(name=key_path)
    # Decode the secret value
    secret_key = response.payload.data.decode("UTF-8")
    return json.loads(secret_key)

# Google Drive authentication
def get_credentials(service_account_info):
    credentials = service_account.Credentials.from_service_account_info(
        service_account_info,
        scopes=SCOPES
    )
    return credentials


def create_new_document(service, folder_id, title):
    """
    Create a new Google Docs file in the given Drive folder.
    :param service: Google Drive API service object
    :param title: document title
    :return: ID of the created document
    """
    file_metadata = {
        'name': title,
        'parents': [folder_id],  # parent folder ID
        'mimeType': 'application/vnd.google-apps.document',
    }
    result = (
        service.files()
        .create(body=file_metadata, fields="id", supportsAllDrives=True)
        .execute()
    )
    return result.get("id")


def append_minutes_to_doc(service, document_id, minutes):
    """
    Insert the minutes text into the document with the Docs API.
    :param service: Google Docs API service object
    :param document_id: target document ID
    :param minutes: text to insert
    """
    requests = [
        {
            'insertText': {
                'location': {
                    'index': 1,
                },
                'text': minutes
            }
        },
    ]

    body = {
        'requests': requests
    }

    # Append the minutes to the document
    result = service.documents().batchUpdate(
        documentId=document_id,
        body=body,
    ).execute()
    return result

@@ -1,5 +0,0 @@
functions-framework==3.*
google-cloud-secret-manager
google-api-python-client
google-auth-httplib2
google-auth-oauthlib

@@ -1,14 +0,0 @@
#!/bin/bash

# Environment variables
PROJECT_ID="datacom-poc"
WORKFLOW_NAME="mrt-workflow-create-minutes"


gcloud auth application-default set-quota-project $PROJECT_ID
gcloud config set project $PROJECT_ID


gcloud workflows deploy $WORKFLOW_NAME \
--source=main.yaml \
--location=asia-northeast1

@@ -1,71 +0,0 @@
main:
  params: [input]
  steps:
    - initialize:
        assign:
          - project_id: ${sys.get_env("GOOGLE_CLOUD_PROJECT_ID")}
          - create_hubspot_meeting_log_result: {}
          - upload_minutes_to_drive_result: {}
    - generate_meeting_minutes:
        call: http.post
        args:
          url: ${"https://asia-northeast1-" + project_id + ".cloudfunctions.net/mrt-generate-meeting-minutes"}
          body:
            video: ${input.video}
          auth:
            type: OIDC
        result: generate_meeting_minutes_result
    - conditinal_switch:
        switch:
          - condition: ${generate_meeting_minutes_result.body.status != "end"}
            steps:
              - parallel_execute:
                  parallel:
                    shared:
                      [
                        create_hubspot_meeting_log_result,
                        upload_minutes_to_drive_result,
                      ]
                    branches:
                      - create_hubspot_meeting_log_branch:
                          steps:
                            - create_hubspot_meeting_log:
                                call: http.post
                                args:
                                  url: ${"https://asia-northeast1-" + project_id + ".cloudfunctions.net/mrt-create-hubspot-meeting-log"}
                                  body:
                                    title: ${generate_meeting_minutes_result.body.title}
                                    host_id: ${generate_meeting_minutes_result.body.host_id}
                                    starts_at: ${generate_meeting_minutes_result.body.starts_at}
                                    ends_at: ${generate_meeting_minutes_result.body.ends_at}
                                    minutes: ${generate_meeting_minutes_result.body.minutes}
                                  auth:
                                    type: OIDC
                                result: create_hubspot_meeting_log_result
                      - upload_minutes_to_drive_branch:
                          steps:
                            - upload-minutes-to-drive:
                                call: http.post
                                args:
                                  url: ${"https://asia-northeast1-" + project_id + ".cloudfunctions.net/mrt-upload-minutes-to-drive"}
                                  body:
                                    file_name: ${generate_meeting_minutes_result.body.file_name}
                                    minutes: ${generate_meeting_minutes_result.body.minutes}
                                  auth:
                                    type: OIDC
                                result: upload_minutes_to_drive_result
              - append_log_to_sheet:
                  call: http.post
                  args:
                    url: ${"https://asia-northeast1-" + project_id + ".cloudfunctions.net/mrt-append-log-to-sheet"}
                    body:
                      title: ${generate_meeting_minutes_result.body.title}
                      host_name: ${generate_meeting_minutes_result.body.host_name}
                      video_url: ${generate_meeting_minutes_result.body.video_url}
                      starts_at: ${generate_meeting_minutes_result.body.starts_at}
                      matched_company_id: ${create_hubspot_meeting_log_result.body.matched_company_id}
                      matched_company_name: ${create_hubspot_meeting_log_result.body.matched_company_name}
                      document_id: ${upload_minutes_to_drive_result.body.document_id}
                    auth:
                      type: OIDC
                  result: append_log_to_sheet_result