A multi-purpose novel generator built on large language models. It integrates the complete creative workflow, from worldview architecture through chapter generation to consistency maintenance, giving writers end-to-end intelligent writing assistance.
If you run into problems installing the dependencies, installing a C++ compiler toolchain usually resolves them.
First, configure the LLM interface parameters in the GUI:
# Example configuration structure
{
    "api_key": "your_api_key",
    "base_url": "https://api.deepseek.com/v1",
    "model_name": "deepseek-chat",
    "temperature": 0.7,
    "max_tokens": 8192
}
# Generate the overall novel architecture (worldview and plot skeleton)
Novel_architecture_generate(
    interface_format="OpenAI",
    api_key=api_key,
    base_url=base_url,
    llm_model="deepseek-chat",
    topic="科幻未来",
    genre="硬科幻",
    number_of_chapters=20,
    word_number=3000,
    filepath="./novel_project"
)
# Generate the per-chapter blueprint (chapter directory)
Chapter_blueprint_generate(
    interface_format=interface_format,
    api_key=api_key,
    base_url=base_url,
    llm_model=model_name,
    filepath=filepath,
    number_of_chapters=number_of_chapters
)
# Generate a chapter draft
chapter_draft = generate_chapter_draft(
    interface_format=interface_format,
    api_key=api_key,
    base_url=base_url,
    model_name=model_name,
    temperature=temperature,
    filepath=filepath,
    novel_number=chapter_num,
    user_guidance=user_guidance
)
# Run a consistency check on the chapter text
consistency_result = check_consistency(
    novel_setting=novel_setting,
    character_state=character_state,
    global_summary=global_summary,
    chapter_text=chapter_text,
    api_key=api_key,
    base_url=base_url,
    model_name=model_name
)
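For orientation, the sketch below chains the two calls above into a draft-then-review loop over every chapter. It is only an illustration: it assumes the signatures shown above, that generate_chapter_draft returns the draft text, and that novel_setting, character_state and global_summary have already been loaded into strings.

for chapter_num in range(1, number_of_chapters + 1):
    # Draft the chapter, then immediately review it for consistency
    chapter_text = generate_chapter_draft(
        interface_format=interface_format,
        api_key=api_key,
        base_url=base_url,
        model_name=model_name,
        temperature=temperature,
        filepath=filepath,
        novel_number=chapter_num,
        user_guidance=user_guidance
    )
    report = check_consistency(
        novel_setting=novel_setting,
        character_state=character_state,
        global_summary=global_summary,
        chapter_text=chapter_text,
        api_key=api_key,
        base_url=base_url,
        model_name=model_name
    )
    print(f"Chapter {chapter_num} consistency report:\n{report}")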
Import reference documents to enrich the writing material:
import_knowledge_file(
    embedding_api_key=embedding_api_key,
    embedding_url=embedding_url,
    embedding_interface_format=embedding_interface_format,
    embedding_model_name=embedding_model_name,
    file_path="./knowledge.txt",
    filepath=filepath
)
import re

def parse_chapter_blueprint(blueprint_text: str):
    """
    Parse the full chapter blueprint text and return structured data.

    Returns a list of dicts, one per chapter, each shaped like:
    {
        "chapter_number": int,    # chapter number
        "chapter_title": str,     # chapter title
        "chapter_role": str,      # role of this chapter
        "chapter_purpose": str,   # core purpose
        "suspense_level": str,    # suspense density
        "foreshadowing": str,     # foreshadowing operations
        "plot_twist_level": str,  # degree of cognitive subversion
        "chapter_summary": str    # brief chapter summary
    }
    """
    chunks = re.split(r'\n\s*\n', blueprint_text.strip())
    results = []

    # Regex patterns for each field
    chapter_number_pattern = re.compile(r'^第\s*(\d+)\s*章\s*-\s*\[?(.*?)\]?$')
    role_pattern = re.compile(r'^本章定位:\s*\[?(.*?)\]?$')
    purpose_pattern = re.compile(r'^核心作用:\s*\[?(.*?)\]?$')

    for chunk in chunks:
        lines = chunk.strip().splitlines()
        if not lines:
            continue

        # Parse the chapter header line
        header_match = chapter_number_pattern.match(lines[0].strip())
        if not header_match:
            continue
        chapter_number = int(header_match.group(1))
        chapter_title = header_match.group(2).strip()

        chapter_data = {
            "chapter_number": chapter_number,
            "chapter_title": chapter_title,
            "chapter_role": "",
            "chapter_purpose": "",
            "suspense_level": "",
            "foreshadowing": "",
            "plot_twist_level": "",
            "chapter_summary": ""
        }

        # Walk the remaining lines and match each field
        for line in lines[1:]:
            line_stripped = line.strip()
            m = role_pattern.match(line_stripped)
            if m:
                chapter_data["chapter_role"] = m.group(1).strip()
                continue
            m = purpose_pattern.match(line_stripped)
            if m:
                chapter_data["chapter_purpose"] = m.group(1).strip()
                continue
            # ...the remaining fields are matched with analogous patterns

        results.append(chapter_data)

    # Sort by chapter number and return
    results.sort(key=lambda x: x["chapter_number"])
    return results
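As a quick usage sketch (the blueprint text below is invented purely to match the regular expressions above), each chapter entry is a blank-line-separated block with a header line followed by field lines:

blueprint_text = """第 1 章 - [星港黎明]
本章定位:[开篇铺垫]
核心作用:[引入主角与世界观]

第 2 章 - [深空信号]
本章定位:[悬念升级]
核心作用:[抛出核心谜题]"""

chapters = parse_chapter_blueprint(blueprint_text)
print(chapters[0]["chapter_title"])    # 星港黎明
print(chapters[1]["chapter_purpose"])  # 抛出核心谜题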
def check_consistency(
    novel_setting: str,
    character_state: str,
    global_summary: str,
    chapter_text: str,
    api_key: str,
    base_url: str,
    model_name: str,
    temperature: float = 0.3,
    plot_arcs: str = "",
    interface_format: str = "OpenAI",
    max_tokens: int = 2048,
    timeout: int = 600
) -> str:
    """
    Ask the model to run a consistency check, detecting plot conflicts and logical
    inconsistencies. Also checks how unresolved conflicts or key plot points are
    carried forward.
    """
    # Build the review prompt
    prompt = CONSISTENCY_PROMPT.format(
        novel_setting=novel_setting,
        character_state=character_state,
        global_summary=global_summary,
        plot_arcs=plot_arcs,
        chapter_text=chapter_text
    )
    # Create the LLM adapter
    llm_adapter = create_llm_adapter(
        interface_format=interface_format,
        base_url=base_url,
        model_name=model_name,
        api_key=api_key,
        temperature=temperature,
        max_tokens=max_tokens,
        timeout=timeout
    )
    # Invoke the model and return its review
    response = llm_adapter.invoke(prompt)
    return response if response else "审校Agent无回复"
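The CONSISTENCY_PROMPT template itself is not reproduced in this article. Any template exposing the five placeholders consumed by the format() call above will do; the stand-in below is a hypothetical minimal version, not the project's actual prompt:

CONSISTENCY_PROMPT = """请审校下面的最新章节,找出与既有设定的剧情冲突、逻辑矛盾,
以及未解决冲突或剧情要点的衔接问题。

小说设定:
{novel_setting}

角色状态:
{character_state}

全局摘要:
{global_summary}

剧情要点/未解决冲突:
{plot_arcs}

最新章节:
{chapter_text}"""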
import os
import json

def load_config(config_file: str) -> dict:
    """Load configuration from the given file; create a default config file if it does not exist."""
    if not os.path.exists(config_file):
        create_config(config_file)
    try:
        with open(config_file, 'r', encoding='utf-8') as f:
            return json.load(f)
    except Exception:
        return {}
def create_config(config_file: str) -> dict:
    """Create a default configuration file covering several model configs."""
    config = {
        "last_interface_format": "OpenAI",
        "last_embedding_interface_format": "OpenAI",
        "llm_configs": {
            "DeepSeek V3": {
                "api_key": "",
                "base_url": "https://api.deepseek.com/v1",
                "model_name": "deepseek-chat",
                "temperature": 0.7,
                "max_tokens": 8192,
                "timeout": 600,
                "interface_format": "OpenAI"
            },
            # Other model configs...
        },
        "embedding_configs": {
            "OpenAI": {
                "api_key": "",
                "base_url": "https://api.openai.com/v1",
                "model_name": "text-embedding-ada-002",
                "retrieval_k": 4,
                "interface_format": "OpenAI"
            }
        }
    }
    # Persist the defaults so load_config can read them back
    with open(config_file, 'w', encoding='utf-8') as f:
        json.dump(config, f, ensure_ascii=False, indent=4)
    return config
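Together, the two functions give a self-healing configuration load. A small usage sketch (config.json is just an example path; the keys mirror the default structure created above):

config = load_config("config.json")   # writes the default file on first run
llm_conf = config["llm_configs"]["DeepSeek V3"]
print(llm_conf["base_url"], llm_conf["model_name"])
# https://api.deepseek.com/v1 deepseek-chat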
from typing import Optional
from langchain_openai import ChatOpenAI

class BaseLLMAdapter:
    """Unified base class for LLM interfaces; gives every backend the same method signature."""
    def invoke(self, prompt: str) -> str:
        raise NotImplementedError("Subclasses must implement .invoke(prompt) method.")

class DeepSeekAdapter(BaseLLMAdapter):
    """Adapter for the DeepSeek official / OpenAI-compatible interface."""
    def __init__(self, api_key: str, base_url: str, model_name: str,
                 max_tokens: int, temperature: float = 0.7,
                 timeout: Optional[int] = 600):
        self.base_url = check_base_url(base_url)
        self.api_key = api_key
        self.model_name = model_name
        self.max_tokens = max_tokens
        self.temperature = temperature
        self.timeout = timeout
        self._client = ChatOpenAI(
            model=self.model_name,
            api_key=self.api_key,
            base_url=self.base_url,
            max_tokens=self.max_tokens,
            temperature=self.temperature,
            timeout=self.timeout
        )

    def invoke(self, prompt: str) -> str:
        """Call the model and return the generated text."""
        response = self._client.invoke(prompt)
        # ChatOpenAI returns a message object; extract its text content
        return response.content if response else ""
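The create_llm_adapter factory used in check_consistency is not shown in this article; conceptually it only needs to map interface_format onto the matching adapter class. A simplified sketch under that assumption (the real project supports additional backends and parameters):

def create_llm_adapter(interface_format: str, base_url: str, model_name: str,
                       api_key: str, temperature: float, max_tokens: int,
                       timeout: int) -> BaseLLMAdapter:
    # Dispatch on the interface format; OpenAI-compatible backends reuse DeepSeekAdapter here
    fmt = interface_format.strip().lower()
    if fmt in ("openai", "deepseek"):
        return DeepSeekAdapter(
            api_key=api_key,
            base_url=base_url,
            model_name=model_name,
            max_tokens=max_tokens,
            temperature=temperature,
            timeout=timeout
        )
    raise ValueError(f"Unsupported interface_format: {interface_format}")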
Through its modular design and unified interface abstraction, the project offers a complete AI-assisted workflow for novel writing, improving both drafting efficiency and the consistency of the output.