Add your description here
Project description
Intro
This is an MCP server for manipulating HarmonyOS devices.
https://github.com/user-attachments/assets/7af7f5af-e8c6-4845-8d92-cd0ab30bfe17
Quick Start
Installation
- Clone this repo
git clone https://github.com/XixianLiang/HarmonyOS-mcp-server.git
cd HarmonyOS-mcp-server
- Set up the environment.
uv python install 3.13
uv sync
Usage
1. Claude Desktop
You can use Claude Desktop to try our tool.
2. OpenAI SDK
You can also use openai-agents SDK to try the mcp server. Here's an example
"""
Example: Use Openai-agents SDK to call HarmonyOS-mcp-server
"""
import asyncio
import os
from agents import Agent, Runner, gen_trace_id, trace
from agents.mcp import MCPServerStdio, MCPServer
async def run(mcp_server: MCPServer):
    """Run a single demo task against the HarmonyOS device via MCP.

    Wires an agent to *mcp_server*, asks it to launch the ``settings``
    app on the phone, and prints the agent's final answer.
    """
    task = "Launch the app `settings` on the phone"
    assistant = Agent(
        name="Assistant",
        instructions="Use the tools to manipulate the HarmonyOS device and finish the task.",
        mcp_servers=[mcp_server],
    )
    print(f"Running: {task}")
    outcome = await Runner.run(starting_agent=assistant, input=task)
    print(outcome.final_output)
async def main():
    """Spawn the MCP server over stdio and run the demo task under a trace."""
    launch_params = {
        "command": "<...>/bin/uv",
        "args": [
            "--directory",
            "<...>/harmonyos-mcp-server",
            "run",
            "server.py",
        ],
    }
    # The async context manager starts the server subprocess and
    # guarantees it is torn down when the block exits.
    async with MCPServerStdio(params=launch_params) as mcp:
        trace_id = gen_trace_id()
        with trace(workflow_name="MCP HarmonyOS", trace_id=trace_id):
            print(f"View trace: https://platform.openai.com/traces/trace?trace_id={trace_id}\n")
            await run(mcp)


if __name__ == "__main__":
    asyncio.run(main())
3. LangChain
You can use LangGraph, a flexible LLM agent framework, to design your workflows. Here's an example:
"""
langgraph_mcp.py
"""
# How to launch the MCP server: run `server.py` with `uv` as a stdio subprocess.
server_params = StdioServerParameters(
    command="/home/chad/.local/bin/uv",
    args=["--directory", ".", "run", "server.py"],
)
# Build your own agent workflow with LangGraph.
async def create_graph(session):
    """Create a LangGraph agent graph whose LLM can call the MCP tools.

    Loads the MCP session's tools, binds them to a local Ollama model,
    and wires a chat node and a tool node that alternate until the
    model stops requesting tool calls.
    """
    model = ChatOllama(model="qwen2.5:7b", temperature=0)

    # load_mcp_tools bridges the MCP server's tools into LangChain;
    # bind_tools lets the model emit calls to those tools.
    mcp_tools = await load_mcp_tools(session)
    tool_enabled_model = model.bind_tools(mcp_tools)

    # Fetch the system prompt published by the MCP server and prepend it
    # to the running conversation.
    system_prompt = await load_mcp_prompt(session, "system_prompt")
    prompt_template = ChatPromptTemplate.from_messages([
        ("system", system_prompt[0].content),
        MessagesPlaceholder("messages"),
    ])
    chat_chain = prompt_template | tool_enabled_model

    # Conversation state: a message list merged via the add_messages reducer.
    class State(TypedDict):
        messages: Annotated[List[AnyMessage], add_messages]

    def chat_node(state: State) -> State:
        # Run the prompted model over the conversation so far.
        state["messages"] = chat_chain.invoke({"messages": state["messages"]})
        return state

    # Wire the workflow: chat -> (tool node if tools were requested, else end) -> chat ...
    # For LangGraph basics see
    # https://langchain-ai.github.io/langgraph/tutorials/get-started/1-build-basic-chatbot/#3-add-a-node
    builder = StateGraph(State)
    builder.add_node("chat_node", chat_node)
    builder.add_node("tool_node", ToolNode(tools=mcp_tools))
    builder.add_edge(START, "chat_node")
    builder.add_conditional_edges("chat_node", tools_condition, {"tools": "tool_node", "__end__": END})
    builder.add_edge("tool_node", "chat_node")
    return builder.compile(checkpointer=MemorySaver())
async def main():
    """Interactive loop: connect to the MCP server and chat through the graph."""
    async with stdio_client(server_params) as (read, write):
        async with ClientSession(read, write) as session:
            # The session must be initialized before tools/prompts are loaded.
            await session.initialize()
            run_config = RunnableConfig(thread_id=1234, recursion_limit=15)
            # Use the MCP server's tools inside the graph.
            agent = await create_graph(session)
            # Read user input forever, feeding each line through the agent graph.
            while True:
                user_text = input("User: ")
                try:
                    reply = await agent.ainvoke({"messages": user_text}, config=run_config)
                    print("AI: " + reply["messages"][-1].content)
                except RecursionError:
                    # NOTE(review): LangGraph raises its own GraphRecursionError
                    # when recursion_limit is hit — confirm this handler matches
                    # the exception actually raised.
                    result = None
                    logging.error("Graph recursion limit reached.")


if __name__ == "__main__":
    asyncio.run(main())
Write the system prompt in server.py
"""
server.py
"""
# Registered as an MCP prompt; clients retrieve it with
# load_mcp_prompt(session, "system_prompt"). The docstring and the returned
# string are served to clients verbatim, so both are runtime-visible text.
@mcp.prompt()
def system_prompt() -> str:
    """System prompt description"""
    return """
You are an AI assistant use the tools if needed.
"""
Use the load_mcp_prompt function to fetch your prompt from the MCP server.
"""
langgraph_mcp.py
"""
# Fetch the server-defined "system_prompt" through the active client session.
prompts = await load_mcp_prompt(session, "system_prompt")
Project details
Release history Release notifications | RSS feed
Download files
Download the file for your platform. If you're not sure which to choose, learn more about installing packages.
Source Distribution
Built Distribution
Filter files by name, interpreter, ABI, and platform.
If you're not sure about the file name format, learn more about wheel file names.
Copy a direct link to the current filters
File details
Details for the file iflow_mcp_xixianliang_harmonyos_mcp_server-0.1.1.tar.gz.
File metadata
- Download URL: iflow_mcp_xixianliang_harmonyos_mcp_server-0.1.1.tar.gz
- Upload date:
- Size: 5.6 kB
- Tags: Source
- Uploaded using Trusted Publishing? No
- Uploaded via: uv/0.10.0 {"installer":{"name":"uv","version":"0.10.0","subcommand":["publish"]},"python":null,"implementation":{"name":null,"version":null},"distro":{"name":"Debian GNU/Linux","version":"13","id":"trixie","libc":null},"system":{"name":null,"release":null},"cpu":null,"openssl_version":null,"setuptools_version":null,"rustc_version":null,"ci":null}
File hashes
| Algorithm | Hash digest | |
|---|---|---|
| SHA256 |
55f40b2176bebbef8aba8b136baaa97c919271a1606cbffca09adab72bee54c3
|
|
| MD5 |
331cb2ed9c0112d9de2c026ae54bad79
|
|
| BLAKE2b-256 |
50864f076ca336d5319a90f0b2272d94ca45978b07487322a39b7095a356c41f
|
File details
Details for the file iflow_mcp_xixianliang_harmonyos_mcp_server-0.1.1-py3-none-any.whl.
File metadata
- Download URL: iflow_mcp_xixianliang_harmonyos_mcp_server-0.1.1-py3-none-any.whl
- Upload date:
- Size: 7.1 kB
- Tags: Python 3
- Uploaded using Trusted Publishing? No
- Uploaded via: uv/0.10.0 {"installer":{"name":"uv","version":"0.10.0","subcommand":["publish"]},"python":null,"implementation":{"name":null,"version":null},"distro":{"name":"Debian GNU/Linux","version":"13","id":"trixie","libc":null},"system":{"name":null,"release":null},"cpu":null,"openssl_version":null,"setuptools_version":null,"rustc_version":null,"ci":null}
File hashes
| Algorithm | Hash digest | |
|---|---|---|
| SHA256 |
199ca92862dbd3269954b46db0bb7ce0c958390fda8e4ed0fe7de6b3a876f5c8
|
|
| MD5 |
404ed83f5285567c5473c6dd2c08f7fe
|
|
| BLAKE2b-256 |
2930ff6a735defa402cf09d1904b8be50524f9216ad0de77d833f556e4eb0377
|