Story engine: hook up fuzzy search and provide default help
lunzhiPenxil committed Jan 4, 2024
1 parent 02f83ff commit 077deb0
Showing 2 changed files with 67 additions and 11 deletions.
25 changes: 22 additions & 3 deletions OlivaStoryCore/msgCustom.py
@@ -28,7 +28,8 @@
     'strStoryCoreStoryTall': '{tStoryCoreResult}\n\n{tStoryCoreSelection}',
     'strStoryCoreStoryTallNone': '故事不存在',
     'strStoryCoreStoryTallBreak': '故事中断了',
-    'strStoryCoreStoryTallEnd': '故事结束了'
+    'strStoryCoreStoryTallEnd': '故事结束了',
+    'strStoryCoreStoryRecommend': '未找到故事,您可能想要的是:\n{tStoryCoreRecommend}'
 }
 
 dictStrConst = {}
@@ -37,9 +38,27 @@
 
 dictTValue = {
     'tStoryCoreResult': 'N/A',
-    'tStoryCoreSelection': 'N/A'
+    'tStoryCoreSelection': 'N/A',
+    'tStoryCoreRecommend': 'N/A'
 }
 
-dictHelpDocTemp = {}
+dictHelpDocTemp = {
+    'story': '''故事引擎模块:
+[.story [故事名称]] 开启对应的故事
+[.story end] 结束故事''',
+
+    'OlivaStoryCore': '''[OlivaStoryCore]
+OlivaStory核心模块
+本模块为青果跑团掷骰机器人(OlivaDice)的故事引擎模块,新一代文游引擎,它的设计初衷是为了让骰主能够更加方便地进行文游的设计,以文游的方式进行带团。
+核心开发者: lunzhiPenxil仑质
+[.help OlivaStoryCore更新] 查看本模块更新日志
+注: 本模块为可选模块。''',
+
+    'OlivaStoryCore更新': '''[OlivaStoryCore]
+3.0.1: 优化体验
+3.0.0: 初始化项目''',
+
+    '故事引擎': '&story',
+}
 
 dictUserConfigNoteDefault = {}
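
The new 'strStoryCoreStoryRecommend' template is later filled in through OlivaDiceCore.msgCustomManager.formatReplySTR, which substitutes the {tStoryCoreRecommend} placeholder from dictTValue. A minimal sketch of that substitution step, assuming a simple key-by-key replacement; format_reply_str and the sample story names are hypothetical stand-ins, not the real OlivaDiceCore implementation:

# Hypothetical stand-in for OlivaDiceCore.msgCustomManager.formatReplySTR:
# fill each {tKey} placeholder in a template with the value from dictTValue.
def format_reply_str(template: str, dictTValue: dict) -> str:
    res = template
    for key, value in dictTValue.items():
        res = res.replace('{' + key + '}', str(value))
    return res

dictTValue = {'tStoryCoreRecommend': '[.story demo_story_a]\n[.story demo_story_b]'}
print(format_reply_str('未找到故事,您可能想要的是:\n{tStoryCoreRecommend}', dictTValue))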
53 changes: 45 additions & 8 deletions OlivaStoryCore/msgReply.py
@@ -249,14 +249,29 @@ def unity_reply(plugin_event, Proc):
                 storyName = tmp_story_name,
                 chatToken = tmp_chat_token
             )
-            tmp_reply_str = getStoryTall(
-                plugin_event = plugin_event,
-                dictStrCustom = dictStrCustom,
-                dictTValue = dictTValue,
-                nodeData = tmp_nodeData,
-                flagIsStart = True,
-                noteList = tmp_noteList
-            )
+            if tmp_nodeData is not None:
+                tmp_reply_str = getStoryTall(
+                    plugin_event = plugin_event,
+                    dictStrCustom = dictStrCustom,
+                    dictTValue = dictTValue,
+                    nodeData = tmp_nodeData,
+                    flagIsStart = True,
+                    noteList = tmp_noteList
+                )
+            else:
+                tmp_reply_str = None
+                searchList = getStorySearchList(tmp_story_name, tmp_botHash)
+                if len(searchList) > 0:
+                    dictTValue['tStoryCoreRecommend'] = '\n'.join([f'[.story {storyName_this}]' for storyName_this in searchList])
+                    tmp_reply_str = OlivaDiceCore.msgCustomManager.formatReplySTR(dictStrCustom['strStoryCoreStoryRecommend'], dictTValue)
+                else:
+                    tmp_reply_str = OlivaDiceCore.msgCustomManager.formatReplySTR(dictStrCustom['strStoryCoreStoryTallNone'], dictTValue)
+            if tmp_reply_str != None:
+                replyMsg(plugin_event, tmp_reply_str)
+        else:
+            tmp_reply_str = OlivaDiceCore.helpDoc.getHelp('story', plugin_event.bot_info.hash)
+            if tmp_reply_str != None:
+                replyMsg(plugin_event, tmp_reply_str)
     else:
         tmp_platform = plugin_event.platform['platform']
         tmp_botHash = plugin_event.bot_info.hash
@@ -483,3 +498,25 @@ def getStoryTall(
     except Exception as e:
         traceback.print_exc()
     return res
+
+def getStorySearchList(storyName_search:str, botHash:str):
+    res = []
+    tmp_RecommendRank_list = []
+    if botHash in OlivaStoryCore.storyEngine.storyList:
+        for storyName_this in OlivaStoryCore.storyEngine.storyList[botHash]:
+            tmp_RecommendRank_list.append([
+                OlivaDiceCore.helpDoc.getRecommendRank(
+                    storyName_search,
+                    storyName_this
+                ),
+                storyName_this
+            ])
+    tmp_RecommendRank_list.sort(key = lambda x : x[0])
+    tmp_count_max = min(8, len(tmp_RecommendRank_list))
+    count = 0
+    while count < tmp_count_max:
+        if tmp_RecommendRank_list[count][0] < 1000:
+            if len(tmp_RecommendRank_list[count][1]) < 25:
+                res.append(tmp_RecommendRank_list[count][1])
+        count += 1
+    return res
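
A standalone sketch of the filtering getStorySearchList performs: rank every known story name against the query, sort by rank, keep at most 8 candidates, and drop any candidate ranked 1000 or above or 25 characters or longer. mock_recommend_rank is only an assumed stand-in for OlivaDiceCore.helpDoc.getRecommendRank, whose actual scoring is not shown in this diff:

# Sketch of the recommendation filter; mock_recommend_rank is an assumed
# stand-in for OlivaDiceCore.helpDoc.getRecommendRank (lower = closer match).
def mock_recommend_rank(search: str, candidate: str) -> int:
    if search == candidate:
        return 0
    if search in candidate:
        return 100 + len(candidate) - len(search)
    return 1000  # treated as "no match" by the < 1000 filter below

def story_search_list(search: str, story_names: list) -> list:
    ranked = [[mock_recommend_rank(search, name), name] for name in story_names]
    ranked.sort(key=lambda x: x[0])
    res = []
    for rank, name in ranked[:min(8, len(ranked))]:   # at most 8 recommendations
        if rank < 1000 and len(name) < 25:             # drop non-matches and over-long names
            res.append(name)
    return res

print(story_search_list('dragon', ['dragon quest', 'haunted house', 'dragon']))
# -> ['dragon', 'dragon quest']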
