
Commit be9bba1

mypy fixes
1 parent 1a45c4c commit be9bba1

12 files changed, +144 −135 lines changed

12 files changed

+144
-135
lines changed

src/neo4j_graphrag/llm/anthropic_llm.py

Lines changed: 5 additions & 5 deletions
@@ -39,7 +39,7 @@
 
 
 # pylint: disable=redefined-builtin, arguments-differ, raise-missing-from, no-else-return
-class AnthropicLLM(LLMInterface, LLMInterfaceV2):
+class AnthropicLLM(LLMInterface, LLMInterfaceV2):  # type: ignore[misc]
     """Interface for large language models on Anthropic
 
     Args:
@@ -85,7 +85,7 @@ def __init__(
         self.async_client = anthropic.AsyncAnthropic(**kwargs)
 
     # overloads for LLMInterface and LLMInterfaceV2 methods
-    @overload
+    @overload  # type: ignore[no-overload-impl]
     def invoke(
         self,
         input: str,
@@ -99,7 +99,7 @@ def invoke(
         input: List[LLMMessage],
     ) -> LLMResponse: ...
 
-    @overload
+    @overload  # type: ignore[no-overload-impl]
     async def ainvoke(
         self,
         input: str,
@@ -114,7 +114,7 @@ async def ainvoke(
     ) -> LLMResponse: ...
 
     # switching logics to LLMInterface or LLMInterfaceV2
-    def invoke(
+    def invoke(  # type: ignore[no-redef]
         self,
         input: Union[str, List[LLMMessage]],
         message_history: Optional[Union[List[LLMMessage], MessageHistory]] = None,
@@ -127,7 +127,7 @@ def invoke(
         else:
             raise ValueError(f"Invalid input type for invoke method - {type(input)}")
 
-    async def ainvoke(
+    async def ainvoke(  # type: ignore[no-redef]
         self,
         input: Union[str, List[LLMMessage]],
         message_history: Optional[Union[List[LLMMessage], MessageHistory]] = None,
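The same pattern repeats across all of the providers touched by this commit: the legacy LLMInterface call style (a prompt string plus optional message_history) and the LLMInterfaceV2 call style (a list of LLMMessage dicts) are declared as @overload stubs, and a single runtime implementation dispatches on the type of input. The ignore codes are needed presumably because the invoke and ainvoke overload groups are declared together ahead of both implementations, so mypy cannot pair each overload set with the implementation that follows later and reports no-overload-impl on the stubs and no-redef on the implementation; type: ignore[misc] on the class line covers the conflicting method definitions inherited from the two base interfaces. A minimal, self-contained sketch of the dispatch pattern, using illustrative names (ExampleLLM and a stand-in LLMMessage TypedDict) rather than the package's own types:

# Hypothetical, minimal sketch of the dual-interface dispatch used in this
# commit. ExampleLLM and the stand-in LLMMessage TypedDict are illustrative,
# not part of neo4j_graphrag.
from typing import List, TypedDict, Union, overload


class LLMMessage(TypedDict):
    role: str
    content: str


class ExampleLLM:
    @overload
    def invoke(self, input: str) -> str: ...

    @overload
    def invoke(self, input: List[LLMMessage]) -> str: ...

    def invoke(self, input: Union[str, List[LLMMessage]]) -> str:
        # Runtime switch between the legacy (str) and V2 (message list) paths.
        if isinstance(input, str):
            return f"legacy path: {input}"
        elif isinstance(input, list):
            return f"v2 path: {len(input)} messages"
        else:
            raise ValueError(f"Invalid input type for invoke method - {type(input)}")

In this isolated sketch the overloads sit directly above their implementation, so no type: ignore comments are required; in the real classes the grouped overload layout and the dual inheritance are what make the ignore codes necessary.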

src/neo4j_graphrag/llm/cohere_llm.py

Lines changed: 5 additions & 5 deletions
@@ -43,7 +43,7 @@
 
 
 # pylint: disable=redefined-builtin, arguments-differ, raise-missing-from, no-else-return
-class CohereLLM(LLMInterface, LLMInterfaceV2):
+class CohereLLM(LLMInterface, LLMInterfaceV2):  # type: ignore[misc]
     """Interface for large language models on the Cohere platform
 
     Args:
@@ -87,7 +87,7 @@ def __init__(
         self.async_client = cohere.AsyncClientV2(**kwargs)
 
     # overloads for LLMInterface and LLMInterfaceV2 methods
-    @overload
+    @overload  # type: ignore[no-overload-impl]
     def invoke(
         self,
         input: str,
@@ -101,7 +101,7 @@ def invoke(
         input: List[LLMMessage],
     ) -> LLMResponse: ...
 
-    @overload
+    @overload  # type: ignore[no-overload-impl]
     async def ainvoke(
         self,
         input: str,
@@ -116,7 +116,7 @@ async def ainvoke(
     ) -> LLMResponse: ...
 
     # switching logics to LLMInterface or LLMInterfaceV2
-    def invoke(
+    def invoke(  # type: ignore[no-redef]
         self,
         input: Union[str, List[LLMMessage]],
         message_history: Optional[Union[List[LLMMessage], MessageHistory]] = None,
@@ -129,7 +129,7 @@ def invoke(
         else:
             raise ValueError(f"Invalid input type for invoke method - {type(input)}")
 
-    async def ainvoke(
+    async def ainvoke(  # type: ignore[no-redef]
         self,
         input: Union[str, List[LLMMessage]],
         message_history: Optional[Union[List[LLMMessage], MessageHistory]] = None,

src/neo4j_graphrag/llm/mistralai_llm.py

Lines changed: 9 additions & 10 deletions
@@ -39,9 +39,9 @@
 try:
     from mistralai import (
         Messages,
-        UserMessage,
+        UserMessage as MistralUserMessage,
         AssistantMessage,
-        SystemMessage,
+        SystemMessage as MistralSystemMessage,
         Mistral,
     )
     from mistralai.models.sdkerror import SDKError
@@ -51,8 +51,7 @@
 
 
 # pylint: disable=redefined-builtin, arguments-differ, raise-missing-from, no-else-return
-class MistralAILLM(LLMInterface):
-
+class MistralAILLM(LLMInterface, LLMInterfaceV2):  # type: ignore[misc]
     def __init__(
         self,
         model_name: str,
@@ -82,7 +81,7 @@ def __init__(
         self.client = Mistral(api_key=api_key, **kwargs)
 
     # overloads for LLMInterface and LLMInterfaceV2 methods
-    @overload
+    @overload  # type: ignore[no-overload-impl]
     def invoke(
         self,
         input: str,
@@ -96,7 +95,7 @@ def invoke(
         input: List[LLMMessage],
     ) -> LLMResponse: ...
 
-    @overload
+    @overload  # type: ignore[no-overload-impl]
     async def ainvoke(
         self,
         input: str,
@@ -111,7 +110,7 @@ async def ainvoke(
     ) -> LLMResponse: ...
 
     # switching logics to LLMInterface or LLMInterfaceV2
-    def invoke(
+    def invoke(  # type: ignore[no-redef]
         self,
         input: Union[str, List[LLMMessage]],
         message_history: Optional[Union[List[LLMMessage], MessageHistory]] = None,
@@ -124,7 +123,7 @@ def invoke(
         else:
             raise ValueError(f"Invalid input type for invoke method - {type(input)}")
 
-    async def ainvoke(
+    async def ainvoke(  # type: ignore[no-redef]
         self,
         input: Union[str, List[LLMMessage]],
         message_history: Optional[Union[List[LLMMessage], MessageHistory]] = None,
@@ -313,10 +312,10 @@ def get_brand_new_messages(
         messages: list[Messages] = []
         for m in input:
             if m["role"] == "system":
-                messages.append(SystemMessage(content=m["content"]))
+                messages.append(MistralSystemMessage(content=m["content"]))
                 continue
             if m["role"] == "user":
-                messages.append(UserMessage(content=m["content"]))
+                messages.append(MistralUserMessage(content=m["content"]))
                 continue
             if m["role"] == "assistant":
                 messages.append(AssistantMessage(content=m["content"]))
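Beyond the interface change, the MistralAI diff also aliases the SDK's UserMessage and SystemMessage imports, presumably so they no longer collide with identically named message types used elsewhere in the package, and updates get_brand_new_messages to use the aliases. A hedged sketch of that conversion step in isolation (LLMMessage is a stand-in TypedDict, and the unknown-role fallback is an illustrative assumption, not taken from the commit):

# Sketch of the role-based message conversion from the diff above, pulled out
# into a standalone helper for clarity.
from typing import List, TypedDict

from mistralai import (
    AssistantMessage,
    Messages,
    SystemMessage as MistralSystemMessage,
    UserMessage as MistralUserMessage,
)


class LLMMessage(TypedDict):
    role: str
    content: str


def to_mistral_messages(input: List[LLMMessage]) -> list[Messages]:
    # Map generic role/content dicts onto the aliased Mistral SDK message classes.
    messages: list[Messages] = []
    for m in input:
        if m["role"] == "system":
            messages.append(MistralSystemMessage(content=m["content"]))
        elif m["role"] == "user":
            messages.append(MistralUserMessage(content=m["content"]))
        elif m["role"] == "assistant":
            messages.append(AssistantMessage(content=m["content"]))
        else:
            # Assumed fallback for roles the diff does not handle.
            raise ValueError(f"Unsupported role: {m['role']}")
    return messages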

src/neo4j_graphrag/llm/ollama_llm.py

Lines changed: 5 additions & 5 deletions
@@ -56,7 +56,7 @@
 # pylint: disable=redefined-builtin, arguments-differ, raise-missing-from, no-else-return
 
 
-class OllamaLLM(LLMInterface, LLMInterfaceV2):
+class OllamaLLM(LLMInterface, LLMInterfaceV2):  # type: ignore[misc]
     """LLM wrapper for Ollama models."""
 
     def __init__(
@@ -96,7 +96,7 @@ def __init__(
         self.model_params = {"options": self.model_params}
 
     # overloads for LLMInterface and LLMInterfaceV2 methods
-    @overload
+    @overload  # type: ignore[no-overload-impl]
     def invoke(
         self,
         input: str,
@@ -110,7 +110,7 @@ def invoke(
         input: List[LLMMessage],
     ) -> LLMResponse: ...
 
-    @overload
+    @overload  # type: ignore[no-overload-impl]
     async def ainvoke(
         self,
         input: str,
@@ -125,7 +125,7 @@ async def ainvoke(
     ) -> LLMResponse: ...
 
     # switching logics to LLMInterface or LLMInterfaceV2
-    def invoke(
+    def invoke(  # type: ignore[no-redef]
         self,
         input: Union[str, List[LLMMessage]],
         message_history: Optional[Union[List[LLMMessage], MessageHistory]] = None,
@@ -138,7 +138,7 @@ def invoke(
         else:
             raise ValueError(f"Invalid input type for invoke method - {type(input)}")
 
-    async def ainvoke(
+    async def ainvoke(  # type: ignore[no-redef]
         self,
         input: Union[str, List[LLMMessage]],
         message_history: Optional[Union[List[LLMMessage], MessageHistory]] = None,

src/neo4j_graphrag/llm/openai_llm.py

Lines changed: 8 additions & 8 deletions
@@ -103,7 +103,7 @@ def __init__(
         super().__init__(model_name, model_params, rate_limit_handler)
 
     # overloads for LLMInterface and LLMInterfaceV2 methods
-    @overload
+    @overload  # type: ignore[no-overload-impl]
     def invoke(
         self,
         input: str,
@@ -117,7 +117,7 @@ def invoke(
         input: List[LLMMessage],
     ) -> LLMResponse: ...
 
-    @overload
+    @overload  # type: ignore[no-overload-impl]
     async def ainvoke(
         self,
         input: str,
@@ -131,7 +131,7 @@ async def ainvoke(
         input: List[LLMMessage],
     ) -> LLMResponse: ...
 
-    @overload
+    @overload  # type: ignore[no-overload-impl]
     def invoke_with_tools(
         self,
         input: str,
@@ -147,7 +147,7 @@ def invoke_with_tools(
         tools: Sequence[Tool],  # Tools definition as a sequence of Tool objects
     ) -> ToolCallResponse: ...
 
-    @overload
+    @overload  # type: ignore[no-overload-impl]
     async def ainvoke_with_tools(
         self,
         input: str,
@@ -164,7 +164,7 @@ async def ainvoke_with_tools(
     ) -> ToolCallResponse: ...
 
     # switching logics to LLMInterface or LLMInterfaceV2
-    def invoke(
+    def invoke(  # type: ignore[no-redef]
         self,
         input: Union[str, List[LLMMessage]],
         message_history: Optional[Union[List[LLMMessage], MessageHistory]] = None,
@@ -177,7 +177,7 @@ def invoke(
         else:
             raise ValueError(f"Invalid input type for invoke method - {type(input)}")
 
-    async def ainvoke(
+    async def ainvoke(  # type: ignore[no-redef]
         self,
         input: Union[str, List[LLMMessage]],
         message_history: Optional[Union[List[LLMMessage], MessageHistory]] = None,
@@ -192,7 +192,7 @@ async def ainvoke(
         else:
             raise ValueError(f"Invalid input type for ainvoke method - {type(input)}")
 
-    def invoke_with_tools(
+    def invoke_with_tools(  # type: ignore[no-redef]
         self,
         input: Union[str, List[LLMMessage]],
         tools: Sequence[Tool],  # Tools definition as a sequence of Tool objects
@@ -210,7 +210,7 @@ def invoke_with_tools(
                 f"Invalid input type for invoke_with_tools method - {type(input)}"
             )
 
-    async def ainvoke_with_tools(
+    async def ainvoke_with_tools(  # type: ignore[no-redef]
         self,
         input: Union[str, List[LLMMessage]],
         tools: Sequence[Tool],
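The caller-facing effect of these overloads is that one invoke method now serves both interfaces: pass a plain prompt string for the legacy LLMInterface behaviour, or a list of role/content messages for LLMInterfaceV2, and the implementation dispatches on the runtime type of the argument. A usage sketch, not part of the commit, assuming valid OpenAI credentials and that the chosen model name is available:

# Hedged usage sketch: the same invoke accepts both call styles.
# Model name and credentials below are assumptions for illustration.
from neo4j_graphrag.llm import OpenAILLM

llm = OpenAILLM(model_name="gpt-4o")  # assumes OPENAI_API_KEY is set

# Legacy LLMInterface style: a plain prompt string.
legacy_response = llm.invoke("Summarise the last release notes.")

# LLMInterfaceV2 style: a list of role/content messages.
v2_response = llm.invoke(
    [
        {"role": "system", "content": "You are a terse assistant."},
        {"role": "user", "content": "Summarise the last release notes."},
    ]
)

print(legacy_response.content)
print(v2_response.content)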

src/neo4j_graphrag/llm/vertexai_llm.py

Lines changed: 8 additions & 8 deletions
@@ -101,7 +101,7 @@ def __init__(
         self.options = kwargs
 
     # overloads for LLMInterface and LLMInterfaceV2 methods
-    @overload
+    @overload  # type: ignore[no-overload-impl]
     def invoke(
         self,
         input: str,
@@ -115,7 +115,7 @@ def invoke(
         input: List[LLMMessage],
     ) -> LLMResponse: ...
 
-    @overload
+    @overload  # type: ignore[no-overload-impl]
     async def ainvoke(
         self,
         input: str,
@@ -129,7 +129,7 @@ async def ainvoke(
         input: List[LLMMessage],
     ) -> LLMResponse: ...
 
-    @overload
+    @overload  # type: ignore[no-overload-impl]
     def invoke_with_tools(
         self,
         input: str,
@@ -145,7 +145,7 @@ def invoke_with_tools(
         tools: Sequence[Tool],  # Tools definition as a sequence of Tool objects
     ) -> ToolCallResponse: ...
 
-    @overload
+    @overload  # type: ignore[no-overload-impl]
     async def ainvoke_with_tools(
         self,
         input: str,
@@ -163,7 +163,7 @@ async def ainvoke_with_tools(
 
     # switching logics to LLMInterface or LLMInterfaceV2
 
-    def invoke(
+    def invoke(  # type: ignore[no-redef]
         self,
         input: Union[str, List[LLMMessage]],
         message_history: Optional[Union[List[LLMMessage], MessageHistory]] = None,
@@ -176,7 +176,7 @@ def invoke(
         else:
             raise ValueError(f"Invalid input type for invoke method - {type(input)}")
 
-    async def ainvoke(
+    async def ainvoke(  # type: ignore[no-redef]
         self,
         input: Union[str, List[LLMMessage]],
         message_history: Optional[Union[List[LLMMessage], MessageHistory]] = None,
@@ -191,7 +191,7 @@ async def ainvoke(
         else:
             raise ValueError(f"Invalid input type for ainvoke method - {type(input)}")
 
-    def invoke_with_tools(
+    def invoke_with_tools(  # type: ignore[no-redef]
         self,
         input: Union[str, List[LLMMessage]],
         tools: Sequence[Tool],  # Tools definition as a sequence of Tool objects
@@ -209,7 +209,7 @@ def invoke_with_tools(
                 f"Invalid input type for invoke_with_tools method - {type(input)}"
             )
 
-    async def ainvoke_with_tools(
+    async def ainvoke_with_tools(  # type: ignore[no-redef]
         self,
         input: Union[str, List[LLMMessage]],
         tools: Sequence[Tool],
