diff --git a/mkdocs_ai_summary/chatgpt_api.py b/mkdocs_ai_summary/chatgpt_api.py index 3125870..eaf3a35 100644 --- a/mkdocs_ai_summary/chatgpt_api.py +++ b/mkdocs_ai_summary/chatgpt_api.py @@ -21,6 +21,31 @@ def ask(prompt, model="gpt-3.5-turbo"): return completion.choices[0].message.content +def get_cache_dict(cache_dir, file_suffix="_ai_summary_cache2.json"): + cache_file = os.path.join(cache_dir, file_suffix) + if os.path.exists(cache_file): + with open(cache_file, "r+") as f: + cache_dict = json.load(f) + else: + cache_dict = {} + return cache_dict + + +def ask_with_cache(question, page, content_md5, model, cache_dict, logger): + # asked before + if page in cache_dict: + if content_md5 == cache_dict[page]["content_md5"]: + ai_summary = cache_dict[page]["ai_summary"] + logger.info("Using cache.") + # asked before, but content changed + else: + ai_summary = ask(question, model=model) + # not asked before + else: + ai_summary = ask(question, model=model) + return ai_summary + + def get_summary_chatgpt( page, prompt, @@ -33,29 +58,17 @@ def get_summary_chatgpt( question = prompt + markdown if cache: content_md5 = md5(markdown.encode("utf-8")).hexdigest() - cache_file = f"{cache_dir}_ai_summary_cache2.json" - if os.path.exists(cache_file): - with open(cache_file, "r+") as f: - cache_dict = json.load(f) - else: - cache_dict = {} - - # asked before - if page in cache_dict: - if content_md5 == cache_dict[page]["content_md5"]: - ai_summary = cache_dict[page]["ai_summary"] - logger.info("Using cache.") - # asked before, but content changed - else: - ai_summary = ask(question, model=model) - # do not aksed before - else: - ai_summary = ask(question, model=model) - cache_dict[page] = {"content_md5": content_md5, "ai_summary": ai_summary} - with open(f"{cache_dir}/_ai_summary_cache2.json", "w+") as f: - cache_dict = json.dump(cache_dict, f) + cache_dict = get_cache_dict(cache_dir, file_suffix="_ai_summary_cache2.json") + ai_summary = ask_with_cache( + question, page, content_md5, model, 
cache_dict, logger + ) + # always refresh the cache + cache_dict[page] = {"content_md5": content_md5, "ai_summary": ai_summary} + with open(f"{cache_dir}/_ai_summary_cache2.json", "w+") as f: + json.dump(cache_dict, f, indent=4) else: ai_summary = ask(question, model=model) + removed_line_break = ai_summary.replace(r"\n", "") return f"""!!! chatgpt-summary "AI Summary powered by [ChatGPT](https://chat.openai.com/)" - {ai_summary} + {removed_line_break} """ diff --git a/mkdocs_ai_summary/tongyi_api.py b/mkdocs_ai_summary/tongyi_api.py index 7170885..8bb7bfb 100644 --- a/mkdocs_ai_summary/tongyi_api.py +++ b/mkdocs_ai_summary/tongyi_api.py @@ -6,6 +6,8 @@ from dashscope import Generation import logging +from .chatgpt_api import get_cache_dict, ask_with_cache + MAX_LENGTH = 6000 @@ -52,29 +54,17 @@ def get_summary_tongyi( question = (prompt + markdown)[: MAX_LENGTH - 10] if cache: content_md5 = md5(markdown.encode("utf-8")).hexdigest() - cache_file = f"{cache_dir}_ai_summary_cache.json" - if os.path.exists(cache_file): - with open(cache_file, "r+") as f: - cache_dict = json.load(f) - else: - cache_dict = {} - - # asked before - if page in cache_dict: - if content_md5 == cache_dict[page]["content_md5"]: - ai_summary = cache_dict[page]["ai_summary"] - logger.info("Using cache.") - # asked before, but content changed - else: - ai_summary = ask(question, model=model) - # do not aksed before - else: - ai_summary = ask(question, model=model) - cache_dict[page] = {"content_md5": content_md5, "ai_summary": ai_summary} - with open(f"{cache_dir}/_ai_summary_cache.json", "w+") as f: - cache_dict = json.dump(cache_dict, f) + cache_dict = get_cache_dict(cache_dir, file_suffix="_ai_summary_cache1.json") + ai_summary = ask_with_cache( + question, page, content_md5, model, cache_dict, logger + ) + # always refresh the cache + cache_dict[page] = {"content_md5": content_md5, "ai_summary": ai_summary} + with open(f"{cache_dir}/_ai_summary_cache1.json", "w+") as f: + 
json.dump(cache_dict, f, indent=4) else: ai_summary = ask(question, model=model) + removed_line_break = ai_summary.replace(r"\n", "") return f"""!!! tongyiai-summary "AI Summary powered by [通义千问](https://tongyi.aliyun.com/)" - {ai_summary} + {removed_line_break} """ diff --git a/pyproject.toml b/pyproject.toml index 3efae88..ba79c7a 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -7,7 +7,7 @@ where = ["."] [project] name = "mkdocs-ai-summary" -version = "0.2.3" +version = "0.2.4" requires-python = ">=3.10" dependencies = [ "mkdocs>=1.5.3", diff --git a/site/sitemap.xml.gz b/site/sitemap.xml.gz index 390591b..32ac0e5 100644 Binary files a/site/sitemap.xml.gz and b/site/sitemap.xml.gz differ diff --git a/site/withouth1/index.html b/site/withouth1/index.html index 79a8985..4e1f95b 100644 --- a/site/withouth1/index.html +++ b/site/withouth1/index.html @@ -706,10 +706,6 @@
AI Summary powered by 通义千问
-Python的语法学习包含单行逻辑处理和多行逻辑块,如缩进表示代码结构,4个空格为推荐缩进。自定义函数通过def
声明,接受参数,可返回值,支持按位置、名或混合方式传递。lambda
用于定义匿名函数。类定义涉及__new__
和__init__
方法,用于对象创建和初始化,支持类的继承。条件语句如if
、elif
用于基于布尔表达式的逻辑判断,input
函数获取用户输入。for
循环遍历可迭代对象,range
和iterable
是关键。while
循环在条件为真时持续执行。循环控制关键字包括continue
跳过当前循环,break
退出循环,pass
无操作。循环的else
子句在正常结束时执行。Python 3.10引入了match
语句。for
和while
循环有不同的适用场景和控制结构。
copy from https://yangzhang.site/Python/BasicSyntax/builtin_keyword/