add feature for customizing open message

This commit is contained in:
geekgeekrun
2026-02-26 23:14:38 +08:00
parent fff8ca7dff
commit 1c7d3d992a
7 changed files with 510 additions and 190 deletions

View File

@@ -1,2 +1,7 @@
// Default relative weight used when a single item has no explicit serve weight configured.
export const SINGLE_ITEM_DEFAULT_SERVE_WEIGHT = 1
// Chromium build the app expects to download/launch; compared against the installed browser.
export const EXPECT_CHROMIUM_BUILD_ID = '139.0.7258.154'
// Fallback opening-message segments sent to recruiters when the user configured no
// constant open content. Seg [0] is the polite greeting; seg [1] is appended only when
// the LLM-driven rechat mode is active (it introduces the resume-based self-intro).
// NOTE: these are user-facing runtime strings — do not reword or translate them.
export const DEFAULT_CONSTANT_OPEN_CONTENT_SEGS = [
`您好,我对贵公司岗位很感兴趣,希望能有机会进一步沟通,期待您的回复`,
`接下来的聊天,我将根据我简历里的内容,向您进行自我介绍`
]

View File

@@ -18,6 +18,16 @@ export enum RECHAT_LLM_FALLBACK {
EXIT_REMINDER_PROGRAM = 2
}
// Where the opening message sent to a recruiter comes from.
export enum OPEN_CONTENT_SOURCE {
// Send a fixed, user-configured (or default) text.
CONSTANT_CONTENT = 1,
// Generate the opening message with the LLM using the chat context / resume.
GEMINI_WITH_CHAT_CONTEXT = 2
}
// export enum OPEN_LLM_FALLBACK {
// SEND_CONSTANT_CONTENT = 1,
// EXIT_REMINDER_PROGRAM = 2
// }
export enum RUNNING_STATUS_ENUM {
RUNNING = 0,
NORMAL_EXITED = 1,

View File

@@ -32,7 +32,7 @@ import {
requestNewMessageContent
} from '../../READ_NO_REPLY_AUTO_REMINDER_MAIN/boss-operation'
import {
autoReminderPromptTemplateFileName,
defaultPromptMap,
writeDefaultAutoRemindPrompt
} from '../../READ_NO_REPLY_AUTO_REMINDER_MAIN/boss-operation'
import {
@@ -466,17 +466,19 @@ export default function initIpc() {
const res = (await readConfigFile('resumes.json'))?.[0]
return res?.content ?? null
})
ipcMain.on('no-reply-reminder-prompt-edit', async () => {
const template = await readStorageFile(autoReminderPromptTemplateFileName, { isJson: false })
ipcMain.on('no-reply-reminder-prompt-edit', async (_, { type }) => {
const template = await readStorageFile(defaultPromptMap[type].fileName, {
isJson: false
})
if (!template) {
await writeDefaultAutoRemindPrompt()
await writeDefaultAutoRemindPrompt({ type })
}
const filePath = path.join(storageFilePath, autoReminderPromptTemplateFileName)
const filePath = path.join(storageFilePath, defaultPromptMap[type].fileName)
shell.openPath(filePath)
})
ipcMain.on('close-resume-editor', () => resumeEditorWindow?.close())
ipcMain.handle('check-if-auto-remind-prompt-valid', async () => {
await getValidTemplate()
ipcMain.handle('check-if-auto-remind-prompt-valid', async (_, { type }) => {
await getValidTemplate({ type })
})
ipcMain.handle('check-is-resume-content-valid', async () => {
const res = (await readConfigFile('resumes.json'))?.[0]
@@ -486,8 +488,8 @@ export default function initIpc() {
const res = (await readConfigFile('resumes.json'))?.[0]
return resumeContentEnoughDetect(res)
})
ipcMain.handle('overwrite-auto-remind-prompt-with-default', async () => {
await writeDefaultAutoRemindPrompt()
ipcMain.handle('overwrite-auto-remind-prompt-with-default', async (_, { type }) => {
await writeDefaultAutoRemindPrompt({ type })
})
ipcMain.handle('check-if-llm-config-list-valid', async () => {
const llmConfigList = await readConfigFile('llm.json')

View File

@@ -60,7 +60,10 @@ const pickLlmConfigFromList = (llmConfigList, blockModelSet) => {
// let _index = 0
const RESUME_PLACEHOLDER = `__REPLACE_REAL_RESUME_HERE__`
const defaultPrompt = `**核心指令:**
export const defaultPromptMap = {
rechat: {
fileName: 'auto-reminder-resume-system-message-template.md',
content: `**核心指令:**
你是一个智能求职助手需要根据用户简历生成30字左右的提醒消息满足以下要求
1. 每次生成需满足:
- √ 包含1个核心技能 + 1个成果量化
@@ -94,15 +97,21 @@ const defaultPrompt = `**核心指令:**
**输出格式:**
请确保仅回复一句话以JSON响应不要包含其他解释或内容数据结构参考\`{"response": "这里是将会发送给招聘者的内容"}\``
export const autoReminderPromptTemplateFileName = 'auto-reminder-resume-system-message-template.md'
export const getValidTemplate = async () => {
let template = await readStorageFile(autoReminderPromptTemplateFileName, { isJson: false })
if (!template) {
await writeDefaultAutoRemindPrompt()
template = defaultPrompt
},
open: {
fileName: 'auto-reminder-open-message-template.md',
content:
'请根据我的简历帮我写一句谦逊有礼貌的开场白。开头包含“您好”等类似敬语、结尾包含“期待回复”等类似话术。不必包含简历中的具体内容但需要表达出应聘意向。请确保仅响应一句话以JSON响应数据结构参考`{"response": "这里是将会发送给招聘者的内容"}`'
}
if (!template.includes(RESUME_PLACEHOLDER)) {
}
export const getValidTemplate = async ({ type }) => {
let template = await readStorageFile(defaultPromptMap[type].fileName, { isJson: false })
if (!template) {
await writeDefaultAutoRemindPrompt({ type })
template = defaultPromptMap[type].content
}
if (type === 'rechat' && !template.includes(RESUME_PLACEHOLDER)) {
const e = new Error(`简历内容占位符字符串不存在。占位字符串是 ${RESUME_PLACEHOLDER}`)
e.name = `RESUME_PLACEHOLDER_NOT_EXIST`
throw e
@@ -110,8 +119,19 @@ export const getValidTemplate = async () => {
return template
}
export const writeDefaultAutoRemindPrompt = async () => {
await writeStorageFile(autoReminderPromptTemplateFileName, defaultPrompt, { isJson: false })
/**
 * Write the built-in default prompt template for the given scene to storage,
 * overwriting any user-edited template file.
 *
 * @param type - prompt scene key into `defaultPromptMap` ('rechat' | 'open').
 *               Unknown keys are a silent no-op (preserves original switch behavior).
 */
export const writeDefaultAutoRemindPrompt = async ({ type }) => {
  // Both former switch cases were identical; a single map lookup expresses
  // the same behavior without duplicated branches.
  const preset = defaultPromptMap[type]
  if (!preset) {
    return
  }
  await writeStorageFile(preset.fileName, preset.content, {
    isJson: false
  })
}
export const requestNewMessageContent = async (
@@ -119,24 +139,26 @@ export const requestNewMessageContent = async (
{
requestScene,
llmConfigIdForPick
}: { requestScene?: RequestSceneEnum; llmConfigIdForPick?: string[] } = {}
}: {
requestScene?: RequestSceneEnum
llmConfigIdForPick?: string[]
} = {}
) => {
const template = await getValidTemplate()
const systemMessageTemplate = await getValidTemplate({ type: 'rechat' })
const resumeObject = (await readConfigFile('resumes.json'))?.[0]
const resumeContent = formatResumeJsonToMarkdown(resumeObject)
const chatList = [
{
role: 'system',
content: template.replace(RESUME_PLACEHOLDER, resumeContent)
content: systemMessageTemplate.replace(RESUME_PLACEHOLDER, resumeContent)
}
]
const openMessageTemplate = await getValidTemplate({ type: 'open' })
chatList.push({
role: 'user',
content:
'请根据我的简历帮我写一句谦逊有礼貌的开场白。开头包含“您好”等类似敬语、结尾包含“期待回复”等类似话术。不必包含简历中的具体内容但需要表达出应聘意向。请确保仅响应一句话以JSON响应数据结构参考`{"response": "这里是将会发送给招聘者的内容"}`'
content: openMessageTemplate
})
// chatRecords = chatRecords.slice(chatRecords.length - _index)
// debugger
for (const record of chatRecords) {
const assistantJsonContent = JSON.stringify({
response: record.text
@@ -246,14 +268,18 @@ export const requestNewMessageContent = async (
}
}
export async function sendGptContent(page: Page, chatRecords) {
/**
 * Ask the LLM for a new reminder message based on recent chat records.
 *
 * @param chatRecords - recent chat history forwarded to `requestNewMessageContent`.
 * @returns the generated message text to be sent to the recruiter.
 */
export async function getGptContent(chatRecords) {
  const { responseText } = await requestNewMessageContent(chatRecords, {
    requestScene: RequestSceneEnum.readNoReplyAutoReminder
  })
  return responseText
}
export async function sendMessage(page: Page, textToSend: string) {
const chatInputSelector = `.chat-conversation .message-controls .chat-input`
const chatInputHandle = await page.$(chatInputSelector)
const chatInputHandle = (await page.$(chatInputSelector))!
await chatInputHandle.click()
await sleep(500)
await chatInputHandle.click()

View File

@@ -1,7 +1,7 @@
import { bootstrap, launchBoss } from './bootstrap'
import { MsgStatus, type ChatListItem } from './types'
import { Browser, Page } from 'puppeteer'
import { sendGptContent, sendLookForwardReplyEmotion } from './boss-operation'
import { getGptContent, sendLookForwardReplyEmotion, sendMessage } from './boss-operation'
import { sleep, sleepWithRandomDelay } from '@geekgeekrun/utils/sleep.mjs'
import { waitForPage } from '@geekgeekrun/utils/puppeteer/wait.mjs'
import { app, dialog } from 'electron'
@@ -24,6 +24,7 @@ import { BossInfo } from '@geekgeekrun/sqlite-plugin/dist/entity/BossInfo'
import { messageForSaveFilter } from '../../../common/utils/chat-list'
import {
AUTO_CHAT_ERROR_EXIT_CODE,
OPEN_CONTENT_SOURCE,
RECHAT_CONTENT_SOURCE,
RECHAT_LLM_FALLBACK
} from '../../../common/enums/auto-start-chat'
@@ -40,6 +41,7 @@ import { loginWithCookieAssistant } from '../../features/login-with-cookie-assis
import initPublicIpc from '../../utils/initPublicIpc'
import { getLastUsedAndAvailableBrowser } from '../DOWNLOAD_DEPENDENCIES/utils/browser-history'
import { configWithBrowserAssistant } from '../../features/config-with-browser-assistant'
import { DEFAULT_CONSTANT_OPEN_CONTENT_SEGS } from '../../../common/constant'
process.on('SIGTERM', () => {
console.log('收到SIGTERM信号正在退出')
@@ -87,6 +89,21 @@ const onlyRemindBossWithoutBlockCompanyName =
readConfigFile('boss.json').autoReminder?.onlyRemindBossWithoutBlockCompanyName ??
!!blockCompanyNameRegExp
const openContentSource = readConfigFile('boss.json').autoReminder?.openContentSource ?? OPEN_CONTENT_SOURCE.CONSTANT_CONTENT
// Resolve the constant opening message once at startup:
// prefer the user-configured text; otherwise fall back to the defaults
// (both segments when LLM rechat is enabled, greeting-only otherwise).
const constantOpenContent = (() => {
  const configured = readConfigFile('boss.json').autoReminder?.constantOpenContent ?? ''
  if (configured?.trim?.()) {
    return configured
  }
  // No usable user text — pick the default matching the rechat mode.
  return rechatContentSource === RECHAT_CONTENT_SOURCE.GEMINI_WITH_CHAT_CONTEXT
    ? DEFAULT_CONSTANT_OPEN_CONTENT_SEGS.join('')
    : DEFAULT_CONSTANT_OPEN_CONTENT_SEGS[0]
})()
const dbInitPromise = initDb(getPublicDbFilePath())
export const pageMapByName: {
@@ -582,30 +599,50 @@ const mainLoop = async () => {
(throttleIntervalMinutes + 4 * Math.random()) * 60 * 1000
) {
await sleepWithRandomDelay(3250)
if (rechatContentSource === RECHAT_CONTENT_SOURCE.GEMINI_WITH_CHAT_CONTEXT) {
try {
const messageListForGpt = historyMessageList
.filter((it) => it.bizType !== 101 && it.isSelf)
.slice(-recentMessageQuantityForLlm)
await sendGptContent(pageMapByName.boss!, messageListForGpt)
const messageList = historyMessageList
.filter((it) => it.bizType !== 101 && it.isSelf)
.slice(-recentMessageQuantityForLlm)
if (!messageList?.length) {
if (openContentSource === OPEN_CONTENT_SOURCE.CONSTANT_CONTENT) {
await sendMessage(pageMapByName.boss!, constantOpenContent)
gtag('rnrr_llm_content_sent')
} catch (err) {
console.log(err)
if (rechatLlmFallback === RECHAT_LLM_FALLBACK.SEND_LOOK_FORWARD_EMOTION) {
await sendLookForwardReplyEmotion(pageMapByName.boss!)
} else {
try {
const textToSend = await getGptContent(messageList)
await sendMessage(pageMapByName.boss!, textToSend)
gtag('rnrr_llm_content_sent')
} catch (err) {
console.log(err)
await sendMessage(pageMapByName.boss!, constantOpenContent)
gtag('rnrr_look_forward_reply_emotion_sent', {
fallback: true
})
} else {
gtag('rnrr_encounter_error', {
error: err
})
throw err
}
}
} else {
await sendLookForwardReplyEmotion(pageMapByName.boss!)
gtag('rnrr_look_forward_reply_emotion_sent')
if (rechatContentSource === RECHAT_CONTENT_SOURCE.GEMINI_WITH_CHAT_CONTEXT) {
try {
const textToSend = await getGptContent(messageList)
await sendMessage(pageMapByName.boss!, textToSend)
gtag('rnrr_llm_content_sent')
} catch (err) {
console.log(err)
if (rechatLlmFallback === RECHAT_LLM_FALLBACK.SEND_LOOK_FORWARD_EMOTION) {
await sendLookForwardReplyEmotion(pageMapByName.boss!)
gtag('rnrr_look_forward_reply_emotion_sent', {
fallback: true
})
} else {
gtag('rnrr_encounter_error', {
error: err
})
throw err
}
}
} else {
await sendLookForwardReplyEmotion(pageMapByName.boss!)
gtag('rnrr_look_forward_reply_emotion_sent')
}
}
} else {
cursorToContinueFind += 1

View File

@@ -54,7 +54,77 @@
</template>
</div>
</el-form-item>
<el-form-item class="mb0" label="跟进话术 - 当发现已读不回的BOSS时将要向BOSS发出">
<el-form-item label="开场白话术">
<el-radio-group v-model="formContent.autoReminder.openContentSource" w-full>
<div w-full>
<el-radio :label="OPEN_CONTENT_SOURCE.CONSTANT_CONTENT"> 固定文案 </el-radio>
<el-input
v-if="
formContent.autoReminder.openContentSource ===
OPEN_CONTENT_SOURCE.CONSTANT_CONTENT
"
v-model="formContent.autoReminder.constantOpenContent"
w-full
:autosize="{ minRows: 3 }"
max-h-8lh
type="textarea"
:placeholder="defaultConstantOpenContent"
class="pl-30px mb10px"
:style="{
boxSizing: 'border-box'
}"
/>
</div>
<div>
<el-radio :label="OPEN_CONTENT_SOURCE.GEMINI_WITH_CHAT_CONTEXT">
由大语言模型生成的内容
</el-radio>
<div
v-if="
formContent.autoReminder.openContentSource ===
OPEN_CONTENT_SOURCE.GEMINI_WITH_CHAT_CONTEXT
"
ml30px
>
<el-form-item class="mb4px">
<div>
<div>
<el-button
size="small"
type="primary"
@click="
handleClickEditPrompt({
type: 'open'
})
"
>
使用外部编辑器编辑“开场白话术”提示词模板 (Markdown)
</el-button>
<el-button
size="small"
type="primary"
@click="
() => {
restoreDefaultTemplate({
type: 'open',
gaEvName: 'reset_template_clicked_in_main_form'
})
}
"
>
还原默认“开场白话术”提示词模板
</el-button>
</div>
<div class="font-size-12px color-#666">
对生成效果不够满意?可在此查看、编辑“开场白话术”提示词模板。
</div>
</div>
</el-form-item>
</div>
</div>
</el-radio-group>
</el-form-item>
<el-form-item label="跟进话术 - 当发现已读不回的BOSS时将要向BOSS发出">
<el-radio-group v-model="formContent.autoReminder.rechatContentSource">
<div>
<el-tooltip
@@ -71,91 +141,117 @@
</el-radio>
</el-tooltip>
<br />
<el-radio :label="RECHAT_CONTENT_SOURCE.GEMINI_WITH_CHAT_CONTEXT">
由大语言模型(根据简历及当前聊天上下文)生成的内容
</el-radio>
<div>
<el-radio :label="RECHAT_CONTENT_SOURCE.GEMINI_WITH_CHAT_CONTEXT">
由大语言模型(根据简历及当前聊天上下文)生成的内容
</el-radio>
<div
v-if="
formContent.autoReminder.rechatContentSource ===
RECHAT_CONTENT_SOURCE.GEMINI_WITH_CHAT_CONTEXT
"
ml30px
>
<el-form-item class="mb4px">
<div>
<div>
<el-button
size="small"
type="primary"
@click="
handleClickEditPrompt({
type: 'rechat'
})
"
>
使用外部编辑器编辑“跟进话术”提示词模板 (Markdown)
</el-button>
<el-button
size="small"
type="primary"
@click="
() => {
restoreDefaultTemplate({
type: 'rechat',
gaEvName: 'reset_template_clicked_in_main_form'
})
}
"
>
还原默认“跟进话术”提示词模板
</el-button>
</div>
<div class="font-size-12px color-#666">
对生成效果不够满意?可在此查看、编辑“跟进话术”提示词模板。请在模板中需要插入简历的位置插入
__REPLACE_REAL_RESUME_HERE__
</div>
</div>
</el-form-item>
</div>
</div>
</div>
</el-radio-group>
</el-form-item>
<div class="ml-30px">
<div mt10px>
<el-form-item mb0 label="大语言模型公共设置及效果预览" />
<template
v-if="
formContent.autoReminder.rechatContentSource ===
RECHAT_CONTENT_SOURCE.GEMINI_WITH_CHAT_CONTEXT
[
formContent.autoReminder.rechatContentSource,
formContent.autoReminder.openContentSource
].includes(RECHAT_CONTENT_SOURCE.GEMINI_WITH_CHAT_CONTEXT)
"
>
<el-form-item class="mb4px">
<div>
<el-button size="small" type="primary" @click="handleClickEditResume">
编辑简历
</el-button>
<div class="font-size-12px color-#666">
简历内容将提交给大语言模型,以用于生成已读不回提醒消息;提交内容及生成消息中不会包含期望薪资
</div>
</div>
</el-form-item>
<el-form-item class="mb4px">
<div>
<div ml-30px>
<el-form-item class="mb4px">
<div>
<el-button size="small" type="primary" @click="handleClickEditPrompt">
使用外部编辑器编辑提示词模板 (Markdown)
<el-button size="small" type="primary" @click="handleClickEditResume">
编辑简历
</el-button>
<el-button
size="small"
type="primary"
@click="
() => {
gtagRenderer('reset_template_clicked_in_main_form')
restoreDefaultTemplate()
}
"
<div class="font-size-12px color-#666">
简历内容将提交给大语言模型,以用于生成已读不回提醒消息;提交内容及生成消息中不会包含期望薪资
</div>
</div>
</el-form-item>
<el-form-item prop="recentMessageQuantityForLlm">
<div>
携带最近
<el-input-number
v-model="formContent.autoReminder.recentMessageQuantityForLlm"
class="w-120px"
:min="8"
:max="20"
:precision="0"
:step="1"
></el-input-number>
次聊天内容作为上下文生成新消息
</div>
</el-form-item>
<el-form-item label="当配置的所有大模型均不可使用时">
<div class="flex flex-items-center">
<el-select
v-model="formContent.autoReminder.rechatLlmFallback"
class="w200px"
label="name"
>
还原默认提示词模板
</el-button>
<el-option
v-for="option in rechatLlmFallbackOptions"
:key="option.value"
:value="option.value"
:label="option.name"
/>
</el-select>
</div>
<div class="font-size-12px color-#666">
对生成效果不够满意?可在此查看、编辑提示词模板。请在模板中需要插入简历的位置插入
__REPLACE_REAL_RESUME_HERE__
</div>
</div>
</el-form-item>
<el-form-item prop="recentMessageQuantityForLlm">
<div>
携带最近
<el-input-number
v-model="formContent.autoReminder.recentMessageQuantityForLlm"
class="w-120px"
:min="8"
:max="20"
:precision="0"
:step="1"
></el-input-number>
次聊天内容作为上下文生成新消息
</div>
</el-form-item>
<el-form-item>
<el-button size="small" type="primary" @click="handleTestEffectClicked"
>使用当前配置模拟已读不回自动复聊过程</el-button
>
</el-form-item>
<el-form-item prop="recentMessageQuantityForLlm">
<div class="flex flex-items-center">
<span class="whitespace-nowrap">当所有模型均不可使用时&nbsp;</span>
<el-select
v-model="formContent.autoReminder.rechatLlmFallback"
class="w200px"
label="name"
>
<el-option
v-for="option in rechatLlmFallbackOptions"
:key="option.value"
:value="option.value"
:label="option.name"
/>
</el-select>
</div>
</el-form-item>
</el-form-item>
</div>
</template>
<el-form-item ml-30px>
<el-button size="small" type="primary" @click="handleTestEffectClicked"
>使用当前配置模拟已读不回自动复聊过程</el-button
><span text-orange ml10px
>&lt;- 正式运行前建议在这里先试一试大模型生成效果是否符合预期哦</span
>
</el-form-item>
</div>
<el-form-item label="跟进间隔(分钟)" prop="throttleIntervalMinutes">
<el-input-number
@@ -259,6 +355,8 @@ import { computed, nextTick, onUnmounted, ref, watch } from 'vue'
import { dayjs, ElForm, ElMessage, ElMessageBox, ElSelect, ElOption } from 'element-plus'
import { useRouter } from 'vue-router'
import {
OPEN_CONTENT_SOURCE,
// OPEN_LLM_FALLBACK,
RECHAT_CONTENT_SOURCE,
RECHAT_LLM_FALLBACK,
RUNNING_STATUS_ENUM
@@ -267,6 +365,7 @@ import { gtagRenderer as baseGtagRenderer } from '@renderer/utils/gtag'
import mittBus from '../../utils/mitt'
import { QuestionFilled } from '@element-plus/icons-vue'
import RunningOverlay from '@renderer/features/RunningOverlay/index.vue'
import { DEFAULT_CONSTANT_OPEN_CONTENT_SEGS } from '../../../../common/constant'
const gtagRenderer = (name, params?: object) => {
return baseGtagRenderer(name, {
scene: 'rnrr-config',
@@ -282,7 +381,10 @@ const formContent = ref({
recentMessageQuantityForLlm: 8,
rechatLlmFallback: RECHAT_LLM_FALLBACK.SEND_LOOK_FORWARD_EMOTION,
onlyRemindBossWithExpectJobType: true,
onlyRemindBossWithoutBlockCompanyName: true
onlyRemindBossWithoutBlockCompanyName: true,
openContentSource: OPEN_CONTENT_SOURCE.CONSTANT_CONTENT,
// openLlmFallback: OPEN_LLM_FALLBACK.SEND_CONSTANT_CONTENT,
constantOpenContent: ''
}
})
@@ -315,7 +417,10 @@ electron.ipcRenderer.invoke('fetch-config-file-content').then((res) => {
: parseInt(conf.recentMessageQuantityForLlm)
: 8
conf.onlyRemindBossWithExpectJobType = conf.onlyRemindBossWithExpectJobType ?? true
conf.onlyRemindBossWithoutBlockCompanyName = conf.onlyRemindBossWithoutBlockCompanyName ?? true
conf.rechatLlmFallback = conf.rechatLlmFallback ?? RECHAT_LLM_FALLBACK.SEND_LOOK_FORWARD_EMOTION
conf.openContentSource = conf.openContentSource ?? OPEN_CONTENT_SOURCE.CONSTANT_CONTENT
conf.constantOpenContent = conf.constantOpenContent ?? ''
formContent.value.autoReminder = conf
})
@@ -462,8 +567,9 @@ async function checkIsCanRun() {
return false
}
try {
await electron.ipcRenderer.invoke('check-if-auto-remind-prompt-valid')
await electron.ipcRenderer.invoke('check-if-auto-remind-prompt-valid', { type: 'rechat' })
} catch (err) {
console.log(err)
if (err?.message?.includes(`RESUME_PLACEHOLDER_NOT_EXIST`)) {
gtagRenderer('cannot_launch_for_no_resume_placehold')
console.log(`提示词模板无效`, err)
@@ -479,8 +585,10 @@ async function checkIsCanRun() {
}
)
.then(async () => {
gtagRenderer('confirm_invalid_rt_tip_dialog')
await restoreDefaultTemplate()
await restoreDefaultTemplate({
type: 'rechat',
gaEvName: 'confirm_invalid_rt_tip_dialog'
})
})
.catch(() => {
gtagRenderer('close_invalid_rt_tip_dialog')
@@ -511,7 +619,9 @@ const handleSubmit = async () => {
gtagRenderer('config_saved')
if (
formContent.value.autoReminder?.rechatContentSource ===
RECHAT_CONTENT_SOURCE.GEMINI_WITH_CHAT_CONTEXT
RECHAT_CONTENT_SOURCE.GEMINI_WITH_CHAT_CONTEXT ||
formContent.value.autoReminder?.openContentSource ===
OPEN_CONTENT_SOURCE.GEMINI_WITH_CHAT_CONTEXT
) {
if (!(await checkIsCanRun())) {
return
@@ -568,8 +678,9 @@ function handleThrottleIntervalMinutesBlur() {
)
}
const restoreDefaultTemplate = async () => {
await electron.ipcRenderer.invoke('overwrite-auto-remind-prompt-with-default')
const restoreDefaultTemplate = async ({ type, gaEvName }) => {
gtagRenderer(gaEvName)
await electron.ipcRenderer.invoke('overwrite-auto-remind-prompt-with-default', { type })
ElMessage({
type: 'success',
message: '模板还原成功'
@@ -603,11 +714,12 @@ const handleClickEditResume = async () => {
}
}
const handleClickEditPrompt = async () => {
gtagRenderer('edit_prompt_clicked')
await electron.ipcRenderer.send('no-reply-reminder-prompt-edit')
// Open the prompt template for the given scene ('rechat' | 'open') in the
// user's external editor via the main process, recording an analytics event.
const handleClickEditPrompt = async ({ type }) => {
  const payload = { type }
  gtagRenderer('edit_prompt_clicked', payload)
  await electron.ipcRenderer.send('no-reply-reminder-prompt-edit', payload)
}
// for 跟进话术
const rechatLlmFallbackOptions = [
{
name: '发送“[盼回复]”表情',
@@ -619,6 +731,18 @@ const rechatLlmFallbackOptions = [
}
]
// // for 开场白话术
// const openLlmFallbackOptions = [
// {
// name: '发送固定文案',
// value: OPEN_LLM_FALLBACK.SEND_CONSTANT_CONTENT
// },
// {
// name: '退出已读不回自动复聊',
// value: OPEN_LLM_FALLBACK.EXIT_REMINDER_PROGRAM
// }
// ]
async function handleTestEffectClicked() {
gtagRenderer('goto_mock_chat_clicked')
if (!(await checkIsCanRun())) {
@@ -651,6 +775,16 @@ const handleStopButtonClick = async () => {
isStopButtonLoading.value = false
}
}
// Placeholder text for the constant-open-content input: when LLM rechat is
// selected, both default segments apply; otherwise only the greeting segment.
const defaultConstantOpenContent = computed(() =>
  formContent.value.autoReminder.rechatContentSource ===
  RECHAT_CONTENT_SOURCE.GEMINI_WITH_CHAT_CONTEXT
    ? DEFAULT_CONSTANT_OPEN_CONTENT_SEGS.join('')
    : DEFAULT_CONSTANT_OPEN_CONTENT_SEGS[0]
)
</script>
<style lang="scss">

View File

@@ -22,39 +22,65 @@
<div class="pb20px"></div>
<div v-for="(item, index) in messageList" :key="index" flex flex-col flex-items-end>
<div class="message-item-wrap flex flex-col">
<div
class="message-item"
:class="{
'will-enter-context': getIsEnterContent(index)
}"
>
{{ item.text }}
</div>
<div
:style="{
width: 'fit-content',
alignSelf: 'flex-end'
}"
font-size-10px
>
{{ item.usedLlmConfig.model }}
</div>
<div
v-if="item?.usedLlmConfig?.providerCompleteApiUrl?.trim()"
:style="{
width: 'fit-content',
overflow: 'hidden',
whiteSpace: 'nowrap',
textOverflow: 'ellipsis',
alignSelf: 'flex-end',
color: '#bbb'
}"
font-size-10px
w-fit-content
max-w-20em
>
{{ item.usedLlmConfig.providerCompleteApiUrl }}
</div>
<template v-if="item.type === 'text'">
<div
class="message-item"
:class="{
'will-enter-context': getIsEnterContent(index)
}"
>
{{ item.text }}
</div>
</template>
<template v-else>
<div
class="message-item image-message-item"
:class="{
'will-enter-context': getIsEnterContent(index)
}"
>
<img :src="item.imageUrl" alt="" />
</div>
</template>
<!-- eslint-disable-next-line prettier/prettier -->
<template v-if="(typeof item.usedLlmConfig !== 'string')">
<div
:style="{
width: 'fit-content',
alignSelf: 'flex-end'
}"
font-size-10px
>
{{ item.usedLlmConfig.model }}
</div>
<div
v-if="item?.usedLlmConfig?.providerCompleteApiUrl?.trim()"
:style="{
width: 'fit-content',
overflow: 'hidden',
whiteSpace: 'nowrap',
textOverflow: 'ellipsis',
alignSelf: 'flex-end',
color: '#bbb'
}"
font-size-10px
w-fit-content
max-w-20em
>
{{ item.usedLlmConfig.providerCompleteApiUrl }}
</div>
</template>
<template v-else>
<div
:style="{
width: 'fit-content',
alignSelf: 'flex-end'
}"
font-size-10px
>
{{ item.usedLlmConfig }}
</div>
</template>
</div>
</div>
<div class="pb20px"></div>
@@ -147,15 +173,27 @@ import { computed, ref, watch } from 'vue'
import { sleep } from '@geekgeekrun/utils/sleep.mjs'
import { ElMessage } from 'element-plus'
import { gtagRenderer } from '@renderer/utils/gtag'
import {
OPEN_CONTENT_SOURCE,
RECHAT_CONTENT_SOURCE
} from '../../../../common/enums/auto-start-chat'
import { DEFAULT_CONSTANT_OPEN_CONTENT_SEGS } from '../../../../common/constant'
import lookForwardReplyEmotion from '../MainLayout/resources/look-forward-reply-emotion.gif'
// A text message rendered in the mock-chat list.
type MessageItem = {
  text: string
  // Either a human-readable label (e.g. '未使用大模型') or the LLM config object
  // returned by 'request-llm-for-test'. The template narrows with
  // `typeof item.usedLlmConfig !== 'string'` before reading `.model` /
  // `.providerCompleteApiUrl`, so `string` alone was too narrow.
  usedLlmConfig: string | { model: string; providerCompleteApiUrl?: string }
  type: 'text'
  // recordInfo: any
}
const messageList = ref<MessageItem[]>([])
// An image message (e.g. the "[盼回复]" emotion gif) in the mock-chat list.
// BUG FIX: `MessageItem & { type: 'image' }` intersected the literal types
// `'text' & 'image'`, collapsing `type` to `never` and making the type
// uninstantiable — push sites using `type: 'image'` could never type-check.
// Omit the discriminant before re-adding it.
type ImageMessageItem = Omit<MessageItem, 'type'> & {
  type: 'image'
  imageUrl: string
}
const messageList = ref<(MessageItem | ImageMessageItem)[]>([])
const searchParams = Object.fromEntries(new URL(location.href).searchParams)
const recentMessageQuantityForLlm =
Number(new URL(location.href).searchParams.get('recentMessageQuantityForLlm')) || 8
const recentMessageQuantityForLlm = Number(searchParams.recentMessageQuantityForLlm) || 8
// True when the message at `index` is among the most recent
// `recentMessageQuantityForLlm` entries, i.e. it will be sent as LLM context.
function getIsEnterContent(index) {
  const distanceFromEnd = messageList.value.length - 1 - index
  return distanceFromEnd < recentMessageQuantityForLlm
}
@@ -188,32 +226,93 @@ watch(
const scrollElRef = ref(null)
const isLoading = ref(false)
const openContentSource = Number(searchParams.openContentSource)
// Resolve the constant opening message for the mock chat from URL params:
// prefer the caller-supplied text; otherwise use the defaults (both segments
// when LLM rechat is configured, greeting-only otherwise).
const constantOpenContent = (() => {
  const supplied = searchParams.constantOpenContent?.trim()
  if (supplied) {
    return supplied
  }
  const useBothSegments =
    Number(searchParams.rechatContentSource) === RECHAT_CONTENT_SOURCE.GEMINI_WITH_CHAT_CONTEXT
  return useBothSegments
    ? DEFAULT_CONSTANT_OPEN_CONTENT_SEGS.join('')
    : DEFAULT_CONSTANT_OPEN_CONTENT_SEGS[0]
})()
const rechatContentSource = Number(searchParams.rechatContentSource)
async function sendLlmGeneratedContent() {
gtagRenderer('click_mock_chat_send')
isLoading.value = true
try {
const response = await electron.ipcRenderer.invoke('request-llm-for-test', {
messageList: JSON.parse(JSON.stringify((messageList.value ?? []).slice(-8))),
llmConfigIdForPick: selectedLlmConfig.value ? [selectedLlmConfig.value] : null
})
console.log(response)
messageList.value.push({
text: response.responseText,
usedLlmConfig: response.usedLlmConfig
})
await sleep(50)
;(scrollElRef.value as any as HTMLDivElement)?.scrollTo({
top: scrollElRef.value?.scrollHeight,
behavior: 'smooth'
})
} catch (err) {
ElMessage.error({
dangerouslyUseHTMLString: true,
grouping: true,
message: `<div>本次测试所使用的模型不可用</div><div style="margin-top: 10px; white-space: nowrap;">建议在大语言模型配置中关闭相关模型</div>`
})
} finally {
isLoading.value = false
if (!(messageList.value ?? []).length) {
// send open content
if (openContentSource === OPEN_CONTENT_SOURCE.GEMINI_WITH_CHAT_CONTEXT) {
isLoading.value = true
try {
const response = await electron.ipcRenderer.invoke('request-llm-for-test', {
messageList: [],
llmConfigIdForPick: selectedLlmConfig.value ? [selectedLlmConfig.value] : null
})
console.log(response)
messageList.value.push({
type: 'text',
text: response.responseText,
usedLlmConfig: response.usedLlmConfig
})
await sleep(50)
;(scrollElRef.value as any as HTMLDivElement)?.scrollTo({
top: scrollElRef.value?.scrollHeight,
behavior: 'smooth'
})
} catch (err) {
ElMessage.error({
dangerouslyUseHTMLString: true,
grouping: true,
message: `<div>本次测试所使用的模型不可用</div><div style="margin-top: 10px; white-space: nowrap;">建议在大语言模型配置中关闭相关模型</div>`
})
} finally {
isLoading.value = false
}
} else {
messageList.value.push({
type: 'text',
text: constantOpenContent,
usedLlmConfig: '未使用大模型'
})
}
} else {
if (rechatContentSource === RECHAT_CONTENT_SOURCE.GEMINI_WITH_CHAT_CONTEXT) {
isLoading.value = true
try {
const response = await electron.ipcRenderer.invoke('request-llm-for-test', {
messageList: JSON.parse(JSON.stringify((messageList.value ?? []).slice(-8))),
llmConfigIdForPick: selectedLlmConfig.value ? [selectedLlmConfig.value] : null
})
console.log(response)
messageList.value.push({
type: 'text',
text: response.responseText,
usedLlmConfig: response.usedLlmConfig
})
await sleep(50)
;(scrollElRef.value as any as HTMLDivElement)?.scrollTo({
top: scrollElRef.value?.scrollHeight,
behavior: 'smooth'
})
} catch (err) {
ElMessage.error({
dangerouslyUseHTMLString: true,
grouping: true,
message: `<div>本次测试所使用的模型不可用</div><div style="margin-top: 10px; white-space: nowrap;">建议在大语言模型配置中关闭相关模型</div>`
})
} finally {
isLoading.value = false
}
} else {
messageList.value.push({
type: 'image',
text: `[盼回复] 表情`,
imageUrl: lookForwardReplyEmotion,
usedLlmConfig: '未使用大模型'
})
}
}
}
@@ -264,5 +363,12 @@ gtagRenderer('enter_mock_chat_page')
}
}
}
.message-item.image-message-item {
background-color: transparent;
width: 128px;
img {
width: 100%;
}
}
}
</style>