Various updates
parent 07a4b80301 · commit fc2c279836

Summary: local-agent.py is rewritten from a standalone CrewAI demo into a Flask + Socket.IO front end backed by MongoDB Atlas; the agent, task, and crew definitions move into a new Celery module, tasks.py; and four templates are added (chat.html, index.html, processing.html, result.html). A compiled cache file, __pycache__/tasks.cpython-310.pyc, is also committed (binary file, not shown).
local-agent.py  (changed, 146 → 83 lines)

@@ -1,146 +1,83 @@
-from crewai import Agent, Task, Crew
-from crewai_tools import SerperDevTool, ScrapeWebsiteTool, SeleniumScrapingTool
-from langchain_community.chat_models import ChatOllama
-from langchain_openai import ChatOpenAI
-from flask import Flask, jsonify
-import os
+from flask import Flask, jsonify, render_template, request
+from flask_socketio import SocketIO, emit
+from pymongo import MongoClient
+from bson.objectid import ObjectId
+import threading
+from tasks import generate_content
+import html

-# export OPENAI_API_BASE=http://the.mk:11434/v1
-# export OPENAI_MODEL_NAME=crewai-llama3
-# export SERPER_API_KEY=
-
-# class MyAgent(Agent):
-# def __init__(self, role="My Math Professor", goal=None, backstory=None, allow_delegation=False, verbose=True, llm=None):
-# super().__init__(role=role, goal=goal, backstory=backstory, allow_delegation=allow_delegation, verbose=verbose, llm=llm)
-# def to_dict(self):
-# return {
-# 'role': self.role,
-# 'goal': self.goal,
-# 'backstory': self.backstory,
-# 'allow_delegation': self.allow_delegation,
-# 'verbose': self.verbose
-# }
-
-os.environ["OPENAI_API_KEY"] = "NA"
-
-llm = ChatOpenAI(
-    model = "crewai-llama3",
-    base_url = "http://chat.the.mk:11434/v1",
-    temperature=0.1)
-
-# general_agent = MyAgent(role = "Math Professor",
-# goal = """Provide the solution to the students that are asking mathematical questions and give them the answer.""",
-# backstory = """You are an excellent math professor that likes to solve math questions in a way that everyone can understand your solution""",
-# allow_delegation = False,
-# verbose = True,
-# llm = llm)
-# task = Task (description="""what is 3 + 5""",
-# agent = general_agent,
-# expected_output="A numerical answer.")
-
-# crew = Crew(
-# agents=[general_agent],
-# tasks=[task],
-# verbose=2
-# )
-
-# Loading Tools
-search_tool = SerperDevTool()
-scrape_tool = SeleniumScrapingTool()
-
-# Define your agents with roles, goals, tools, and additional attributes
-researcher = Agent(
-    role='Senior Research Analyst',
-    goal='Uncover cutting-edge developments in AI and data science',
-    backstory=(
-        "You are a Senior Research Analyst at a leading tech think tank."
-        "Your expertise lies in identifying emerging trends and technologies in AI and data science."
-        "You have a knack for dissecting complex data and presenting actionable insights."
-        "Always search the web first and make the determination for the best 4 Links"
-        "if you are going to use Read website content tool replace the search positional argument: to 'website_url'"
-        # "Don't include single or double quotes in any or the searches or parameters for the tools including website_url or css_element"
-        "For any web searches, be sure to scrape the website content from the Link in the search"
-    ),
-    verbose=True,
-    allow_delegation=False,
-    tools=[search_tool, scrape_tool],
-    max_rpm=100
-)
-
-writer = Agent(
-    role='Tech Content Strategist',
-    goal='Craft compelling content on tech advancements',
-    backstory=(
-        "You are a renowned Tech Content Strategist, known for your insightful and engaging articles on technology and innovation."
-        "With a deep understanding of the tech industry, you transform complex concepts into compelling narratives."
-        "Always search the web first and make the determination for the best 4 Links"
-        "if you are going to use Read website content tool replace the search positional argument: to 'website_url'"
-        # "Don't include single or double quotes in any or the searches or parameters for the tools including website_url or css_element"
-        "For any web searches, be sure to scrape the website content from the Link in the search"
-    ),
-    verbose=True,
-    allow_delegation=True,
-    tools=[search_tool, scrape_tool],
-    cache=False, # Disable cache for this agent
-)
-
-# Create tasks for your agents
-task1 = Task(
-    description=(
-        "Conduct a comprehensive analysis of the latest advancements in AI in 2024."
-        "Identify key trends, breakthrough technologies, and potential industry impacts."
-        "Compile your findings in a detailed report."
-        "Make sure to check with a human if the draft is good before finalizing your answer."
-    ),
-    expected_output='A comprehensive full report on the latest AI advancements in 2024, leave nothing out',
-    agent=researcher,
-    human_input=True,
-)
-
-task2 = Task(
-    description=(
-        "Using the insights from the researcher's report, develop an engaging blog post that highlights the most significant AI advancements."
-        "Your post should be informative yet accessible, catering to a tech-savvy audience."
-        "Aim for a narrative that captures the essence of these breakthroughs and their implications for the future."
-    ),
-    expected_output='A compelling three paragraphs blog post formatted as markdown with headings, subheadings, and a main thesis about the latest AI advancements in 2024',
-    agent=writer
-)
-
-# Instantiate your crew with a sequential process
-crew = Crew(
-    agents=[researcher, writer],
-    tasks=[task1, task2],
-    verbose=4
-)

 app = Flask(__name__)

-def get_agents(cls):
-    return [agent.__dict__ for agent in cls.agents]
-
-def get_tasks(cls):
-    return [{**task.__dict__, 'expected_output': task.expected_output} for task in cls.tasks]
-
-@app.route('/agents', methods=['GET'])
-def get_agents_route():
-    return jsonify({'agents': get_agents(crew)})
-
-@app.route('/tasks', methods=['GET'])
-def get_tasks_route():
-    return jsonify({'tasks': get_tasks(crew)})
-
-@app.route('/kickoff', methods=['GET'])
-def get_kickoff_route():
-    result = crew.kickoff()
-    print(result)
-    return result
+# MongoDB Atlas connection
+client = MongoClient("mongodb+srv://maheshkommareddi:Yu2L6pQKyJgcTb9a@cluster0.qadl40g.mongodb.net/?retryWrites=true&w=majority&appName=Cluster0")
+db = client.content_generation
+app.config['SECRET_KEY'] = 'secret_key'
+socketio = SocketIO(app)
+
+# Periodically fetch and emit task updates
+def fetch_and_emit_updates():
+    while True:
+        updates = db.task_updates.find({})
+        for update in updates:
+            socketio.emit('agent_update', {
+                'task_id': update['task_id'],
+                'status': update['status'],
+                'message': update['message']
+            })
+            db.task_updates.delete_one({"_id": update["_id"]})  # Remove the update after emitting
+        socketio.sleep(5)
+
+# Start the periodic update thread
+threading.Thread(target=fetch_and_emit_updates).start()
+
+# Route for the chat interface
+@app.route('/chat')
+def chat():
+    return render_template('chat.html')
+
+# SocketIO event handler for new content generation requests
+@socketio.on('generate_content')
+def handle_generate_content(data):
+    agenda = data['agenda']
+    task = generate_content.apply_async(args=[agenda])
+    task_id = task.id
+    emit('task_queued', {'task_id': task_id, 'agenda': agenda})
+
+@app.route('/', methods=['GET', 'POST'])
+def index():
+    # Fetch all existing blog entries from MongoDB Atlas
+    blog_entries = list(db.content.find({}, {"_id": 1, "agenda": 1, "blog_post": 1}))
+
+    if request.method == 'POST':
+        agenda = request.form.get('agenda', 'quantum gravitational sensing for underground detection')
+
+        # Queue the task for background processing
+        task = generate_content.apply_async(args=[agenda])
+
+        # Save the task ID to MongoDB Atlas
+        content = {
+            "agenda": agenda,
+            "task_id": str(task.id),
+            "status": "PENDING"
+        }
+        db.content.insert_one(content)
+
+        return render_template('processing.html', agenda=agenda, task_id=task.id)
+
+    return render_template('index.html', blog_entries=blog_entries)
+
+@app.route('/blog/<id>', methods=['GET'])
+def blog_post(id):
+    # Fetch the blog entry from MongoDB Atlas
+    blog_entry = db.content.find_one({"_id": ObjectId(id)})
+
+    if blog_entry:
+        if blog_entry.get('status') == 'COMPLETED':
+            return render_template('result.html', report=html.unescape(blog_entry['report']), blog_post=blog_entry['blog_post'])
+        else:
+            return render_template('result.html', report=html.unescape(blog_entry['report']), blog_post=blog_entry['blog_post'])
+    else:
+        return "Blog entry not found", 404

 if __name__ == '__main__':
-    app.run(debug=True, port=5001)
+    socketio.run(app, debug=True, port=5001)
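For reference, the sketch below shows a minimal way to drive the new Socket.IO flow from outside the browser. It is not part of the commit: it assumes local-agent.py is running on port 5001 and that the python-socketio client package is installed. The event names ('generate_content', 'task_queued', 'agent_update') and the payload fields (task_id, status, message) are taken from the diff above; note that chat.html also reads an agent_id field that the server-side emit does not currently include.

    # Illustrative client sketch (not part of the commit). Assumes:
    #   pip install "python-socketio[client]"
    # and that local-agent.py is serving on http://localhost:5001.
    import socketio

    sio = socketio.Client()

    @sio.on('task_queued')
    def on_task_queued(data):
        # Emitted by handle_generate_content() right after apply_async()
        print(f"queued task {data['task_id']} for agenda: {data['agenda']}")

    @sio.on('agent_update')
    def on_agent_update(data):
        # Emitted by fetch_and_emit_updates() as it drains db.task_updates
        print(f"[{data['status']}] task {data['task_id']}: {data['message']}")

    sio.connect('http://localhost:5001')
    sio.emit('generate_content', {'agenda': 'quantum gravitational sensing'})
    sio.wait()  # keep listening for updates

Updates reach clients indirectly: the Celery worker writes progress rows to db.task_updates, and fetch_and_emit_updates() polls that collection every five seconds, re-broadcasts each row as an agent_update event, and then deletes it.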
tasks.py  (new file, 143 lines)

@@ -0,0 +1,143 @@
+from celery import Celery
+from crewai import Agent, Task, Crew, tasks
+from crewai_tools import SerperDevTool, ScrapeWebsiteTool, SeleniumScrapingTool
+from langchain_openai import ChatOpenAI
+from pymongo import MongoClient
+import langchain_core
+import html
+import os
+import logging
+
+# Set up logging
+logging.basicConfig(level=logging.INFO)
+logger = logging.getLogger(__name__)
+
+client = MongoClient("mongodb+srv://maheshkommareddi:Yu2L6pQKyJgcTb9a@cluster0.qadl40g.mongodb.net/?retryWrites=true&w=majority&appName=Cluster0")
+db = client.content_generation
+
+# Initialize Celery
+app = Celery('tasks', broker='amqp://guest:guest@localhost:5672//')
+
+# Load environment variables
+os.environ["OPENAI_API_KEY"] = "sk-kkk"
+os.environ["OPENAI_MODEL_NAME"] = "anthropic.claude-3-sonnet-20240229-v1:0"
+os.environ["OPENAI_API_BASE"] = "http://chat.the.mk:1337"
+
+# Initialize LLM
+llm = ChatOpenAI(
+    model="anthropic.claude-3-sonnet-20240229-v1:0",
+    base_url="http://chat.the.mk:1337",
+    temperature=0.1
+)
+
+# Load tools
+search_tool = SerperDevTool()
+scrape_tool = SeleniumScrapingTool()
+scrape_tool_bare = ScrapeWebsiteTool()
+
+# Function to update task status in MongoDB
+def update_task_status(task_id, status, message):
+    try:
+        db.task_updates.insert_one({"task_id": task_id, "status": status, "message": message})
+        logger.info(f"Updated task status: {task_id}, {status}, {message}")
+    except Exception as e:
+        logger.error(f"Error updating task status: {e}")
+
+# Define tasks
+@app.task
+def generate_content(agenda):
+    def researcher_callback(output):
+        if isinstance(output, langchain_core.agents.AgentFinish):
+            update_task_status(app.current_task.request.id, f"researcher", "Agent finished")
+        elif output and output[0]:
+            print(output[0])
+            if output[0][0]:
+                update_task_status(app.current_task.request.id, f"researcher", output[0][0].log)
+
+    def writer_callback(output):
+        if isinstance(output, langchain_core.agents.AgentFinish):
+            update_task_status(app.current_task.request.id, f"writer", "Agent finished")
+        elif output and output[0]:
+            print(output[0])
+            if output[0][0]:
+                update_task_status(app.current_task.request.id, f"writer", output[0][0].log)
+
+    researcher = Agent(
+        role='Senior Research Analyst',
+        goal='Find way to explain ' + agenda,
+        backstory=(
+            "You are a Senior Research Analyst at a leading tech think tank."
+            f"Your expertise lies in identifying {agenda}."
+            "You have a knack for dissecting complex data and presenting actionable insights."
+            "Always search the web first and make the determination for the best 4 Links, but exclude PDFs"
+            "For any web searches, be sure to scrape the website content from the Link in the search"
+        ),
+        verbose=True,
+        allow_delegation=False,
+        tools=[search_tool, scrape_tool, scrape_tool_bare],
+        max_rpm=100,
+        step_callback=researcher_callback
+    )
+
+    writer = Agent(
+        role='Tech Content Strategist',
+        goal='Craft compelling content on ' + agenda,
+        backstory=(
+            "You are a renowned Tech Content Strategist, known for your insightful and engaging articles on science and innovation."
+            "With a deep understanding of the tech industry, you transform complex concepts into compelling narratives."
+            "For any web searches, be sure to scrape the website content from the Link in the search, but exclude PDFs"
+        ),
+        verbose=True,
+        allow_delegation=True,
+        tools=[search_tool, scrape_tool, scrape_tool_bare],
+        cache=False, # Disable cache for this agent
+        step_callback=writer_callback
+    )
+
+    task1 = Task(
+        description=(
+            f"Conduct a comprehensive analysis of the latest in {agenda}"
+            "Identify key trends, breakthrough technologies, and potential industry impacts."
+            "Compile your findings in a detailed report and include references and links to the source material."
+        ),
+        expected_output=f"A comprehensive full report on {agenda} in 2024, leave nothing out",
+        agent=researcher,
+    )
+
+    task2 = Task(
+        description=(
+            f"Using the insights from the researcher's report, develop an engaging blog post that highlights the most significant {agenda} ideas."
+            "Your post should be informative yet accessible, catering to a tech-savvy audience."
+            "Aim for a narrative that captures the essence of these breakthroughs and their implications for the future."
+            "Keep asking for research and revise until the minimum 5000 words are met"
+            "Include the research in the entirety along with the resulting report"
+            "Include at least five links to external pages or PDFs with an appropriate anchor tag in the final report"
+        ),
+        expected_output=f"A compelling ten paragraphs blog post formatted as html to place inside the body tag with headings, subheadings, and a main thesis about the latest {agenda}",
+        agent=writer
+    )
+
+    # Define the main callback for the crew
+    def main_callback(output: tasks.task_output.TaskOutput):
+        update_task_status(app.current_task.request.id, 1, output.description)
+
+    # Instantiate your crew with a sequential process
+    crew = Crew(
+        agents=[researcher, writer],
+        tasks=[task1, task2],
+        verbose=4,
+        task_callback=main_callback
+    )
+
+    result = crew.kickoff()
+
+    # Save the result to MongoDB Atlas
+    content = {
+        "agenda": agenda,
+        "report": html.unescape(result),
+        "blog_post": html.unescape(result)
+    }
+    db.content.insert_one(content)
+
+    return result
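tasks.py configures only a broker (RabbitMQ on localhost) and no Celery result backend, so results are not fetched back through Celery; generate_content writes its output to db.content and reports progress through db.task_updates instead. Below is a minimal usage sketch, not part of the commit, assuming RabbitMQ is reachable at the configured URL and a worker has been started (for example with celery -A tasks worker --loglevel=info).

    # Illustrative usage sketch (not part of the commit). Assumes RabbitMQ is
    # running at amqp://guest:guest@localhost:5672// and a worker is active.
    from tasks import generate_content, db

    async_result = generate_content.apply_async(args=["quantum gravitational sensing"])
    print("queued Celery task:", async_result.id)

    # The worker reports progress by inserting documents into db.task_updates
    # (via update_task_status); local-agent.py polls that collection and
    # re-emits each row over Socket.IO, and the finished report lands in db.content.
    for update in db.task_updates.find({"task_id": async_result.id}):
        print(update["status"], update["message"])

The id returned by apply_async is the same id the worker records via app.current_task.request.id, so it is also the task_id the Flask poller later broadcasts to the chat page.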
templates/chat.html  (new file, 58 lines)

@@ -0,0 +1,58 @@
+<!DOCTYPE html>
+<html>
+<head>
+    <title>Content Generation Chat</title>
+    <script src="https://cdnjs.cloudflare.com/ajax/libs/socket.io/4.5.4/socket.io.js"></script>
+    <script>
+        var socket = io();
+
+        socket.on('connect', function() {
+            console.log('Connected to server');
+        });
+
+        socket.on('task_queued', function(data) {
+            var taskId = data.task_id;
+            var agenda = data.agenda;
+            addMessage('System', `Task ${taskId} queued for generating content on "${agenda}".`);
+        });
+
+        socket.on('agent_update', function(data) {
+            var taskId = data.task_id;
+            var agentId = data.agent_id;
+            var status = data.status;
+            var message = data.message;
+            addMessage(`Agent ${agentId}`, `[${status}] ${message}`);
+        });
+
+        function addMessage(sender, message) {
+            var chatLog = document.getElementById('chat-log');
+            var messageElement = document.createElement('div');
+            messageElement.innerHTML = `<strong>${sender}:</strong> ${message}`;
+            chatLog.appendChild(messageElement);
+        }
+
+        function generateContent() {
+            var agendaInput = document.getElementById('agenda-input');
+            var agenda = agendaInput.value.trim();
+            if (agenda) {
+                socket.emit('generate_content', { agenda: agenda });
+                agendaInput.value = '';
+            }
+        }
+    </script>
+    <style>
+        #chat-log {
+            height: 300px;
+            overflow-y: scroll;
+            border: 1px solid #ccc;
+            padding: 10px;
+        }
+    </style>
+</head>
+<body>
+    <h1>Content Generation Chat</h1>
+    <div id="chat-log"></div>
+    <input type="text" id="agenda-input" placeholder="Enter a topic or agenda">
+    <button onclick="generateContent()">Generate Content</button>
+</body>
+</html>
templates/index.html  (new file, 30 lines)

@@ -0,0 +1,30 @@
+<!DOCTYPE html>
+<html>
+<head>
+    <title>Content Generation</title>
+</head>
+<body>
+    <h1>Content Generation</h1>
+    <form method="post">
+        <label for="agenda">Enter a topic or agenda:</label>
+        <input type="text" id="agenda" name="agenda" required>
+        <button type="submit">Generate Content</button>
+    </form>
+
+    {% if blog_entries %}
+        <h2>Existing Blog Entries</h2>
+        <ul>
+            {% for entry in blog_entries %}
+                {% if entry.blog_post %}
+                    <li>
+                        <a href="/blog/{{ entry._id }}"><h3>{{ entry.agenda }}</h3></a>
+                        <pre>{{ entry.blog_post[:100] }}...</pre>
+                    </li>
+                {% endif %}
+            {% endfor %}
+        </ul>
+    {% endif %}
+</body>
+</html>
templates/processing.html  (new file, 13 lines)

@@ -0,0 +1,13 @@
+<!DOCTYPE html>
+<html>
+<head>
+    <title>Content Generation Processing</title>
+    <meta http-equiv="refresh" content="5">
+</head>
+<body>
+    <h1>Content Generation Processing</h1>
+    <p>Your request for generating content on "{{ agenda }}" is being processed.</p>
+    <p>Task ID: {{ task_id }}</p>
+    <p>This page will refresh every 5 seconds to check the status.</p>
+</body>
+</html>
templates/result.html  (new file, 13 lines)

@@ -0,0 +1,13 @@
+<!DOCTYPE html>
+<html>
+<head>
+    <title>Content Generation Result</title>
+</head>
+<body>
+    <h1>Content Generation Result</h1>
+    <h2>Report</h2>
+    {{ report|safe }}
+    <!-- <h2>Blog Post</h2>
+    <pre>{{ blog_post }}</pre> -->
+</body>
+</html>