Skip to content

Commit

Permalink
refactor, bug fix (cache does not work when file changed)
Browse files Browse the repository at this point in the history
  • Loading branch information
AIboy996 committed May 28, 2024
1 parent c2395d3 commit 5e3a3d0
Show file tree
Hide file tree
Showing 5 changed files with 48 additions and 49 deletions.
57 changes: 35 additions & 22 deletions mkdocs_ai_summary/chatgpt_api.py
Original file line number Diff line number Diff line change
Expand Up @@ -21,6 +21,31 @@ def ask(prompt, model="gpt-3.5-turbo"):
return completion.choices[0].message.content


def get_cache_dict(cache_dir, file_suffix="_ai_summary_cache2.json"):
    """Load the per-site AI-summary cache as a dict.

    Args:
        cache_dir: Directory that holds the cache file.
        file_suffix: File name of the cache inside ``cache_dir``.
            Defaults to the ChatGPT cache file name (with ``.json``,
            matching what the writers in this module actually create).

    Returns:
        The parsed JSON cache mapping page -> {"content_md5", "ai_summary"},
        or an empty dict when no cache file exists yet.
    """
    # BUG FIX: the previous string concatenation (cache_dir + file_suffix)
    # produced e.g. ".cache_ai_summary_cache2.json", while the cache writer
    # saves to f"{cache_dir}/_ai_summary_cache2.json" — so the cache was
    # written but never found again. Join with the path separator instead.
    cache_file = os.path.join(cache_dir, file_suffix)
    if os.path.exists(cache_file):
        # Read-only access is enough here; "r" instead of "r+".
        with open(cache_file, "r") as f:
            cache_dict = json.load(f)
    else:
        cache_dict = {}
    return cache_dict


def ask_with_cache(question, page, content_md5, model, cache_dict, logger):
    """Return a cached summary for *page* when its content is unchanged,
    otherwise ask the model for a fresh one.

    A cache entry is reused only when both the page key exists in
    ``cache_dict`` and the stored ``content_md5`` matches the current one.
    """
    if page in cache_dict:
        entry = cache_dict[page]
        # Cache hit: same page, same content hash — reuse the stored summary.
        if entry["content_md5"] == content_md5:
            logger.info("Using cache.")
            return entry["ai_summary"]
    # Cache miss (never asked, or the page content changed since last time).
    return ask(question, model=model)


def get_summary_chatgpt(
page,
prompt,
Expand All @@ -33,29 +58,17 @@ def get_summary_chatgpt(
question = prompt + markdown
if cache:
content_md5 = md5(markdown.encode("utf-8")).hexdigest()
cache_file = f"{cache_dir}_ai_summary_cache2.json"
if os.path.exists(cache_file):
with open(cache_file, "r+") as f:
cache_dict = json.load(f)
else:
cache_dict = {}

# asked before
if page in cache_dict:
if content_md5 == cache_dict[page]["content_md5"]:
ai_summary = cache_dict[page]["ai_summary"]
logger.info("Using cache.")
# asked before, but content changed
else:
ai_summary = ask(question, model=model)
# do not aksed before
else:
ai_summary = ask(question, model=model)
cache_dict[page] = {"content_md5": content_md5, "ai_summary": ai_summary}
with open(f"{cache_dir}/_ai_summary_cache2.json", "w+") as f:
cache_dict = json.dump(cache_dict, f)
cache_dict = get_cache_dict(cache_dir, file_suffix="_ai_summary_cache2.json")
ai_summary = ask_with_cache(
question, page, content_md5, model, cache_dict, logger
)
# always refresh the cache
cache_dict[page] = {"content_md5": content_md5, "ai_summary": ai_summary}
with open(f"{cache_dir}/_ai_summary_cache2.json", "w+") as f:
cache_dict = json.dump(cache_dict, f, indent=4)
else:
ai_summary = ask(question, model=model)
removed_line_break = ai_summary.replace(r"\n", "")
return f"""!!! chatgpt-summary "AI Summary powered by [ChatGPT](https://chat.openai.com/)"
{ai_summary}
{removed_line_break}
"""
34 changes: 12 additions & 22 deletions mkdocs_ai_summary/tongyi_api.py
Original file line number Diff line number Diff line change
Expand Up @@ -6,6 +6,8 @@
from dashscope import Generation
import logging

from .chatgpt_api import get_cache_dict, ask_with_cache

MAX_LENGTH = 6000


Expand Down Expand Up @@ -52,29 +54,17 @@ def get_summary_tongyi(
question = (prompt + markdown)[: MAX_LENGTH - 10]
if cache:
content_md5 = md5(markdown.encode("utf-8")).hexdigest()
cache_file = f"{cache_dir}_ai_summary_cache.json"
if os.path.exists(cache_file):
with open(cache_file, "r+") as f:
cache_dict = json.load(f)
else:
cache_dict = {}

# asked before
if page in cache_dict:
if content_md5 == cache_dict[page]["content_md5"]:
ai_summary = cache_dict[page]["ai_summary"]
logger.info("Using cache.")
# asked before, but content changed
else:
ai_summary = ask(question, model=model)
# do not aksed before
else:
ai_summary = ask(question, model=model)
cache_dict[page] = {"content_md5": content_md5, "ai_summary": ai_summary}
with open(f"{cache_dir}/_ai_summary_cache.json", "w+") as f:
cache_dict = json.dump(cache_dict, f)
cache_dict = get_cache_dict(cache_dir, file_suffix="_ai_summary_cache1.json")
ai_summary = ask_with_cache(
question, page, content_md5, model, cache_dict, logger
)
# always refresh the cache
cache_dict[page] = {"content_md5": content_md5, "ai_summary": ai_summary}
with open(f"{cache_dir}/_ai_summary_cache2.json", "w+") as f:
cache_dict = json.dump(cache_dict, f, indent=4)
else:
ai_summary = ask(question, model=model)
removed_line_break = ai_summary.replace(r"\n", "")
return f"""!!! tongyiai-summary "AI Summary powered by [通义千问](https://tongyi.aliyun.com/)"
{ai_summary}
{removed_line_break}
"""
2 changes: 1 addition & 1 deletion pyproject.toml
Original file line number Diff line number Diff line change
Expand Up @@ -7,7 +7,7 @@ where = ["."]

[project]
name = "mkdocs-ai-summary"
version = "0.2.3"
version = "0.2.4"
requires-python = ">=3.10"
dependencies = [
"mkdocs>=1.5.3",
Expand Down
Binary file modified site/sitemap.xml.gz
Binary file not shown.
4 changes: 0 additions & 4 deletions site/withouth1/index.html
Original file line number Diff line number Diff line change
Expand Up @@ -706,10 +706,6 @@

<h1>内置关键字(without h1, tongyi ai summary)</h1>

<div class="admonition tongyiai-summary">
<p class="admonition-title">AI Summary powered by <a href="https://tongyi.aliyun.com/">通义千问</a></p>
<p>Python的语法学习包含单行逻辑处理和多行逻辑块,如缩进表示代码结构,4个空格为推荐缩进。自定义函数通过<code>def</code>声明,接受参数,可返回值,支持按位置、名或混合方式传递。<code>lambda</code>用于定义匿名函数。类定义涉及<code>__new__</code><code>__init__</code>方法,用于对象创建和初始化,支持类的继承。条件语句如<code>if</code><code>elif</code>用于基于布尔表达式的逻辑判断,<code>input</code>函数获取用户输入。<code>for</code>循环遍历可迭代对象,<code>range</code><code>iterable</code>是关键。<code>while</code>循环在条件为真时持续执行。循环控制关键字包括<code>continue</code>跳过当前循环,<code>break</code>退出循环,<code>pass</code>无操作。循环的<code>else</code>子句在正常结束时执行。Python 3.10引入了<code>match</code>语句。<code>for</code><code>while</code>循环有不同的适用场景和控制结构。</p>
</div>
<blockquote>
<p>copy from <a href="https://yangzhang.site/Python/BasicSyntax/builtin_keyword/">https://yangzhang.site/Python/BasicSyntax/builtin_keyword/</a></p>
</blockquote>
Expand Down

0 comments on commit 5e3a3d0

Please sign in to comment.