admin管理员组文章数量:1242825
i am trying to implement a model using farm-haystack, however am having a dependency mismatch for the following libraries : transformers farm-haystack langchain pydantic fastapi uvicorn elasticsearch python-multipart, currently i have 2 versions of python installed on my machine (3.12 and 3.11.10), all facing the same challenges. I need help on the proper version for both dependencies and python version which works better for these
from this implementation:
import os
from typing import List
from haystack.document_stores import InMemoryDocumentStore
from haystack.nodes import PreProcessor, BM25Retriever, FARMReader
# Module-level document store shared by all functions below.
# InMemoryDocumentStore keeps indexed chunks in RAM only; swap for an
# ElasticsearchDocumentStore to persist across restarts.
document_store = InMemoryDocumentStore()
# Directory (relative to the working directory) where uploaded documents live.
UPLOAD_FOLDER = "uploaded_docs"
# Create the upload folder at import time so later os.listdir/open calls succeed.
os.makedirs(UPLOAD_FOLDER, exist_ok=True)
def list_documents() -> List[str]:
    """Return the file names currently present in the upload folder.

    Raises:
        RuntimeError: if the upload folder does not exist.
    """
    try:
        entries = os.listdir(UPLOAD_FOLDER)
    except FileNotFoundError:
        raise RuntimeError(f"Upload folder '{UPLOAD_FOLDER}' not found. Please create it.")
    return entries
def read_document(file_path: str) -> str:
    """Return the full text of the file at *file_path*, decoded as UTF-8.

    Raises:
        RuntimeError: wrapping any I/O or decoding error, with the path
            and original message in the error text.
    """
    try:
        with open(file_path, "r", encoding="utf-8") as handle:
            text = handle.read()
    except Exception as e:
        raise RuntimeError(f"Error reading file '{file_path}': {str(e)}")
    return text
def preprocess_document(content: str) -> List[dict]:
    """Split raw document text into overlapping word-based chunks.

    Args:
        content: the full text of one document.

    Returns:
        A list of Haystack document dicts/objects, one per ~200-word chunk
        with a 20-word overlap, suitable for writing to a document store.
    """
    preprocessor = PreProcessor(
        split_by="word",          # chunk by word count rather than characters
        split_length=200,         # target words per chunk
        split_overlap=20,         # words shared between consecutive chunks
        split_respect_sentence_boundary=True,
    )
    # PreProcessor.process expects a *list* of documents; passing a bare
    # dict is deprecated in Haystack 1.x and rejected by later releases.
    return preprocessor.process([{"content": content}])
def index_document(file_name: str):
    """Read a file from the upload folder, chunk it, and write it to the store.

    Args:
        file_name: name of a file inside UPLOAD_FOLDER.

    Returns:
        A dict with a success message and the number of chunks indexed.

    Raises:
        RuntimeError: if the file does not exist in the upload folder.
    """
    file_path = os.path.join(UPLOAD_FOLDER, file_name)
    if not os.path.isfile(file_path):
        raise RuntimeError(f"File '{file_name}' not found in '{UPLOAD_FOLDER}'.")
    raw_text = read_document(file_path)
    pieces = preprocess_document(raw_text)
    # Normalize each chunk into the minimal dict shape the store accepts.
    docs_to_write = []
    for piece in pieces:
        docs_to_write.append({"content": piece["content"]})
    document_store.write_documents(docs_to_write)
    return {
        "message": f"Document '{file_name}' indexed successfully.",
        "chunks_count": len(docs_to_write),
    }
def search_documents(query: str):
    """Answer *query* against the indexed documents (BM25 retrieve + read).

    Args:
        query: natural-language question.

    Returns:
        {"results": [...]} with up to 3 serialized answers, or
        {"message": ...} when no documents match.
    """
    retriever = BM25Retriever(document_store=document_store)
    # Cache the reader on the function object: constructing FARMReader loads
    # the full transformer model, which is far too expensive to repeat per query.
    reader = getattr(search_documents, "_reader", None)
    if reader is None:
        reader = FARMReader(model_name_or_path="deepset/roberta-base-squad2", use_gpu=False)
        search_documents._reader = reader
    # Retrieve candidate documents first; skip the reader if nothing matched.
    retrieved_docs = retriever.retrieve(query)
    if not retrieved_docs:
        return {"message": "No relevant documents found."}
    answers = reader.predict(query=query, documents=retrieved_docs, top_k=3)
    # Serialize Answer objects into plain dicts (JSON-safe for API responses).
    results = [
        {
            "answer": ans.answer,
            "score": ans.score,
            "context": ans.context,
            "document_id": ans.document_id,
        }
        for ans in answers["answers"]
    ]
    return {"results": results}
But i keep getting this error:
Traceback (most recent call last):
File "<string>", line 1, in <module>
File "/usr/lib/python3.11/multiprocessing/spawn.py", line 122, in spawn_main
exitcode = _main(fd, parent_sentinel)
^^^^^^^^^^^^^^^^^^^^^^^^^^
File "/usr/lib/python3.11/multiprocessing/spawn.py", line 131, in _main
prepare(preparation_data)
File "/usr/lib/python3.11/multiprocessing/spawn.py", line 244, in prepare
_fixup_main_from_name(data['init_main_from_name'])
File "/usr/lib/python3.11/multiprocessing/spawn.py", line 268, in _fixup_main_from_name
main_content = runpy.run_module(mod_name,
^^^^^^^^^^^^^^^^^^^^^^^^^^
File "<frozen runpy>", line 226, in run_module
File "<frozen runpy>", line 98, in _run_module_code
File "<frozen runpy>", line 88, in _run_code
File "/home/devoop/Documents/Python Projects/mohcc-ai-tools/app/main.py", line 7, in <module>
from app.views.routes import router
File "/home/devoop/Documents/Python Projects/mohcc-ai-tools/app/views/routes.py", line 2, in <module>
from app.services.document_service import list_documents, index_document, search_documents
File "/home/devoop/Documents/Python Projects/mohcc-ai-tools/app/services/document_service.py", line 3, in <module>
from haystack.document_stores import InMemoryDocumentStore
File "/home/devoop/Documents/Python Projects/mohcc-ai-tools/mohcc-ai-tools-env/lib/python3.11/site-packages/haystack/__init__.py", line 8, in <module>
from haystack.schema import Document, Answer, Label, MultiLabel, Span, EvaluationResult, TableCell
File "/home/devoop/Documents/Python Projects/mohcc-ai-tools/mohcc-ai-tools-env/lib/python3.11/site-packages/haystack/schema.py", line 42, in <module>
@dataclass
^^^^^^^^^
File "/home/devoop/Documents/Python Projects/mohcc-ai-tools/mohcc-ai-tools-env/lib/python3.11/site-packages/pydantic/dataclasses.py", line 250, in dataclass
return create_dataclass(_cls)
^^^^^^^^^^^^^^^^^^^^^^
File "/home/devoop/Documents/Python Projects/mohcc-ai-tools/mohcc-ai-tools-env/lib/python3.11/site-packages/pydantic/dataclasses.py", line 241, in create_dataclass
pydantic_complete = _pydantic_dataclasses.complete_dataclass(
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "/home/devoop/Documents/Python Projects/mohcc-ai-tools/mohcc-ai-tools-env/lib/python3.11/site-packages/pydantic/_internal/_dataclasses.py", line 159, in complete_dataclass
schema = gen_schema.generate_schema(cls, from_dunder_get_core_schema=False)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "/home/devoop/Documents/Python Projects/mohcc-ai-tools/mohcc-ai-tools-env/lib/python3.11/site-packages/pydantic/_internal/_generate_schema.py", line 502, in generate_schema
schema = self._generate_schema_inner(obj)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "/home/devoop/Documents/Python Projects/mohcc-ai-tools/mohcc-ai-tools-env/lib/python3.11/site-packages/pydantic/_internal/_generate_schema.py", line 758, in _generate_schema_inner
return self.match_type(obj)
^^^^^^^^^^^^^^^^^^^^
File "/home/devoop/Documents/Python Projects/mohcc-ai-tools/mohcc-ai-tools-env/lib/python3.11/site-packages/pydantic/_internal/_generate_schema.py", line 832, in match_type
return self._dataclass_schema(obj, None)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "/home/devoop/Documents/Python Projects/mohcc-ai-tools/mohcc-ai-tools-env/lib/python3.11/site-packages/pydantic/_internal/_generate_schema.py", line 1561, in _dataclass_schema
args = sorted(
^^^^^^^
File "/home/devoop/Documents/Python Projects/mohcc-ai-tools/mohcc-ai-tools-env/lib/python3.11/site-packages/pydantic/_internal/_generate_schema.py", line 1562, in <genexpr>
(self._generate_dc_field_schema(k, v, decorators) for k, v in fields.items()),
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "/home/devoop/Documents/Python Projects/mohcc-ai-tools/mohcc-ai-tools-env/lib/python3.11/site-packages/pydantic/_internal/_generate_schema.py", line 933, in _generate_dc_field_schema
common_field = self._common_field_schema(name, field_info, decorators)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "/home/devoop/Documents/Python Projects/mohcc-ai-tools/mohcc-ai-tools-env/lib/python3.11/site-packages/pydantic/_internal/_generate_schema.py", line 1081, in _common_field_schema
schema = self._apply_annotations(
^^^^^^^^^^^^^^^^^^^^^^^^
File "/home/devoop/Documents/Python Projects/mohcc-ai-tools/mohcc-ai-tools-env/lib/python3.11/site-packages/pydantic/_internal/_generate_schema.py", line 1825, in _apply_annotations
schema = get_inner_schema(source_type)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "/home/devoop/Documents/Python Projects/mohcc-ai-tools/mohcc-ai-tools-env/lib/python3.11/site-packages/pydantic/_internal/_schema_generation_shared.py", line 82, in __call__
schema = self._handler(source_type)
^^^^^^^^^^^^^^^^^^^^^^^^^^
File "/home/devoop/Documents/Python Projects/mohcc-ai-tools/mohcc-ai-tools-env/lib/python3.11/site-packages/pydantic/_internal/_generate_schema.py", line 1806, in inner_handler
schema = self._generate_schema_inner(obj)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "/home/devoop/Documents/Python Projects/mohcc-ai-tools/mohcc-ai-tools-env/lib/python3.11/site-packages/pydantic/_internal/_generate_schema.py", line 758, in _generate_schema_inner
return self.match_type(obj)
^^^^^^^^^^^^^^^^^^^^
File "/home/devoop/Documents/Python Projects/mohcc-ai-tools/mohcc-ai-tools-env/lib/python3.11/site-packages/pydantic/_internal/_generate_schema.py", line 840, in match_type
return self._match_generic_type(obj, origin)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "/home/devoop/Documents/Python Projects/mohcc-ai-tools/mohcc-ai-tools-env/lib/python3.11/site-packages/pydantic/_internal/_generate_schema.py", line 864, in _match_generic_type
return self._union_schema(obj)
^^^^^^^^^^^^^^^^^^^^^^^
File "/home/devoop/Documents/Python Projects/mohcc-ai-tools/mohcc-ai-tools-env/lib/python3.11/site-packages/pydantic/_internal/_generate_schema.py", line 1152, in _union_schema
choices.append(self.generate_schema(arg))
^^^^^^^^^^^^^^^^^^^^^^^^^
File "/home/devoop/Documents/Python Projects/mohcc-ai-tools/mohcc-ai-tools-env/lib/python3.11/site-packages/pydantic/_internal/_generate_schema.py", line 502, in generate_schema
schema = self._generate_schema_inner(obj)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "/home/devoop/Documents/Python Projects/mohcc-ai-tools/mohcc-ai-tools-env/lib/python3.11/site-packages/pydantic/_internal/_generate_schema.py", line 758, in _generate_schema_inner
return self.match_type(obj)
^^^^^^^^^^^^^^^^^^^^
File "/home/devoop/Documents/Python Projects/mohcc-ai-tools/mohcc-ai-tools-env/lib/python3.11/site-packages/pydantic/_internal/_generate_schema.py", line 844, in match_type
return self._unknown_type_schema(obj)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "/home/devoop/Documents/Python Projects/mohcc-ai-tools/mohcc-ai-tools-env/lib/python3.11/site-packages/pydantic/_internal/_generate_schema.py", line 405, in _unknown_type_schema
raise PydanticSchemaGenerationError(
pydantic.errors.PydanticSchemaGenerationError: Unable to generate pydantic-core schema for <class 'pandas.core.frame.DataFrame'>. Set `arbitrary_types_allowed=True` in the model_config to ignore this error or implement `__get_pydantic_core_schema__` on your type to fully support it.
If you got this error by calling handler(<some type>) within `__get_pydantic_core_schema__` then you likely need to call `handler.generate_schema(<some type>)` since we do not call `__get_pydantic_core_schema__` on `<some type>` otherwise to avoid infinite recursion.
For further information visit https://errors.pydantic.dev/2.7/u/schema-for-unknown-type
i am trying to implement a model using farm-haystack, however am having a dependency mismatch for the following libraries : transformers farm-haystack langchain pydantic fastapi uvicorn elasticsearch python-multipart, currently i have 2 versions of python installed on my machine (3.12 and 3.11.10), all facing the same challenges. I need help on the proper version for both dependencies and python version which works better for these
from this implementation:
import os
from typing import List
from haystack.document_stores import InMemoryDocumentStore
from haystack.nodes import PreProcessor, BM25Retriever, FARMReader
# Module-level document store shared by all functions below.
# InMemoryDocumentStore keeps indexed chunks in RAM only; swap for an
# ElasticsearchDocumentStore to persist across restarts.
document_store = InMemoryDocumentStore()
# Directory (relative to the working directory) where uploaded documents live.
UPLOAD_FOLDER = "uploaded_docs"
# Create the upload folder at import time so later os.listdir/open calls succeed.
os.makedirs(UPLOAD_FOLDER, exist_ok=True)
def list_documents() -> List[str]:
    """Return the file names currently present in the upload folder.

    Raises:
        RuntimeError: if the upload folder does not exist.
    """
    try:
        entries = os.listdir(UPLOAD_FOLDER)
    except FileNotFoundError:
        raise RuntimeError(f"Upload folder '{UPLOAD_FOLDER}' not found. Please create it.")
    return entries
def read_document(file_path: str) -> str:
    """Return the full text of the file at *file_path*, decoded as UTF-8.

    Raises:
        RuntimeError: wrapping any I/O or decoding error, with the path
            and original message in the error text.
    """
    try:
        with open(file_path, "r", encoding="utf-8") as handle:
            text = handle.read()
    except Exception as e:
        raise RuntimeError(f"Error reading file '{file_path}': {str(e)}")
    return text
def preprocess_document(content: str) -> List[dict]:
    """Split raw document text into overlapping word-based chunks.

    Args:
        content: the full text of one document.

    Returns:
        A list of Haystack document dicts/objects, one per ~200-word chunk
        with a 20-word overlap, suitable for writing to a document store.
    """
    preprocessor = PreProcessor(
        split_by="word",          # chunk by word count rather than characters
        split_length=200,         # target words per chunk
        split_overlap=20,         # words shared between consecutive chunks
        split_respect_sentence_boundary=True,
    )
    # PreProcessor.process expects a *list* of documents; passing a bare
    # dict is deprecated in Haystack 1.x and rejected by later releases.
    return preprocessor.process([{"content": content}])
def index_document(file_name: str):
    """Read a file from the upload folder, chunk it, and write it to the store.

    Args:
        file_name: name of a file inside UPLOAD_FOLDER.

    Returns:
        A dict with a success message and the number of chunks indexed.

    Raises:
        RuntimeError: if the file does not exist in the upload folder.
    """
    file_path = os.path.join(UPLOAD_FOLDER, file_name)
    if not os.path.isfile(file_path):
        raise RuntimeError(f"File '{file_name}' not found in '{UPLOAD_FOLDER}'.")
    raw_text = read_document(file_path)
    pieces = preprocess_document(raw_text)
    # Normalize each chunk into the minimal dict shape the store accepts.
    docs_to_write = []
    for piece in pieces:
        docs_to_write.append({"content": piece["content"]})
    document_store.write_documents(docs_to_write)
    return {
        "message": f"Document '{file_name}' indexed successfully.",
        "chunks_count": len(docs_to_write),
    }
def search_documents(query: str):
    """Answer *query* against the indexed documents (BM25 retrieve + read).

    Args:
        query: natural-language question.

    Returns:
        {"results": [...]} with up to 3 serialized answers, or
        {"message": ...} when no documents match.
    """
    retriever = BM25Retriever(document_store=document_store)
    # Cache the reader on the function object: constructing FARMReader loads
    # the full transformer model, which is far too expensive to repeat per query.
    reader = getattr(search_documents, "_reader", None)
    if reader is None:
        reader = FARMReader(model_name_or_path="deepset/roberta-base-squad2", use_gpu=False)
        search_documents._reader = reader
    # Retrieve candidate documents first; skip the reader if nothing matched.
    retrieved_docs = retriever.retrieve(query)
    if not retrieved_docs:
        return {"message": "No relevant documents found."}
    answers = reader.predict(query=query, documents=retrieved_docs, top_k=3)
    # Serialize Answer objects into plain dicts (JSON-safe for API responses).
    results = [
        {
            "answer": ans.answer,
            "score": ans.score,
            "context": ans.context,
            "document_id": ans.document_id,
        }
        for ans in answers["answers"]
    ]
    return {"results": results}
But i keep getting this error:
Traceback (most recent call last):
File "<string>", line 1, in <module>
File "/usr/lib/python3.11/multiprocessing/spawn.py", line 122, in spawn_main
exitcode = _main(fd, parent_sentinel)
^^^^^^^^^^^^^^^^^^^^^^^^^^
File "/usr/lib/python3.11/multiprocessing/spawn.py", line 131, in _main
prepare(preparation_data)
File "/usr/lib/python3.11/multiprocessing/spawn.py", line 244, in prepare
_fixup_main_from_name(data['init_main_from_name'])
File "/usr/lib/python3.11/multiprocessing/spawn.py", line 268, in _fixup_main_from_name
main_content = runpy.run_module(mod_name,
^^^^^^^^^^^^^^^^^^^^^^^^^^
File "<frozen runpy>", line 226, in run_module
File "<frozen runpy>", line 98, in _run_module_code
File "<frozen runpy>", line 88, in _run_code
File "/home/devoop/Documents/Python Projects/mohcc-ai-tools/app/main.py", line 7, in <module>
from app.views.routes import router
File "/home/devoop/Documents/Python Projects/mohcc-ai-tools/app/views/routes.py", line 2, in <module>
from app.services.document_service import list_documents, index_document, search_documents
File "/home/devoop/Documents/Python Projects/mohcc-ai-tools/app/services/document_service.py", line 3, in <module>
from haystack.document_stores import InMemoryDocumentStore
File "/home/devoop/Documents/Python Projects/mohcc-ai-tools/mohcc-ai-tools-env/lib/python3.11/site-packages/haystack/__init__.py", line 8, in <module>
from haystack.schema import Document, Answer, Label, MultiLabel, Span, EvaluationResult, TableCell
File "/home/devoop/Documents/Python Projects/mohcc-ai-tools/mohcc-ai-tools-env/lib/python3.11/site-packages/haystack/schema.py", line 42, in <module>
@dataclass
^^^^^^^^^
File "/home/devoop/Documents/Python Projects/mohcc-ai-tools/mohcc-ai-tools-env/lib/python3.11/site-packages/pydantic/dataclasses.py", line 250, in dataclass
return create_dataclass(_cls)
^^^^^^^^^^^^^^^^^^^^^^
File "/home/devoop/Documents/Python Projects/mohcc-ai-tools/mohcc-ai-tools-env/lib/python3.11/site-packages/pydantic/dataclasses.py", line 241, in create_dataclass
pydantic_complete = _pydantic_dataclasses.complete_dataclass(
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "/home/devoop/Documents/Python Projects/mohcc-ai-tools/mohcc-ai-tools-env/lib/python3.11/site-packages/pydantic/_internal/_dataclasses.py", line 159, in complete_dataclass
schema = gen_schema.generate_schema(cls, from_dunder_get_core_schema=False)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "/home/devoop/Documents/Python Projects/mohcc-ai-tools/mohcc-ai-tools-env/lib/python3.11/site-packages/pydantic/_internal/_generate_schema.py", line 502, in generate_schema
schema = self._generate_schema_inner(obj)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "/home/devoop/Documents/Python Projects/mohcc-ai-tools/mohcc-ai-tools-env/lib/python3.11/site-packages/pydantic/_internal/_generate_schema.py", line 758, in _generate_schema_inner
return self.match_type(obj)
^^^^^^^^^^^^^^^^^^^^
File "/home/devoop/Documents/Python Projects/mohcc-ai-tools/mohcc-ai-tools-env/lib/python3.11/site-packages/pydantic/_internal/_generate_schema.py", line 832, in match_type
return self._dataclass_schema(obj, None)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "/home/devoop/Documents/Python Projects/mohcc-ai-tools/mohcc-ai-tools-env/lib/python3.11/site-packages/pydantic/_internal/_generate_schema.py", line 1561, in _dataclass_schema
args = sorted(
^^^^^^^
File "/home/devoop/Documents/Python Projects/mohcc-ai-tools/mohcc-ai-tools-env/lib/python3.11/site-packages/pydantic/_internal/_generate_schema.py", line 1562, in <genexpr>
(self._generate_dc_field_schema(k, v, decorators) for k, v in fields.items()),
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "/home/devoop/Documents/Python Projects/mohcc-ai-tools/mohcc-ai-tools-env/lib/python3.11/site-packages/pydantic/_internal/_generate_schema.py", line 933, in _generate_dc_field_schema
common_field = self._common_field_schema(name, field_info, decorators)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "/home/devoop/Documents/Python Projects/mohcc-ai-tools/mohcc-ai-tools-env/lib/python3.11/site-packages/pydantic/_internal/_generate_schema.py", line 1081, in _common_field_schema
schema = self._apply_annotations(
^^^^^^^^^^^^^^^^^^^^^^^^
File "/home/devoop/Documents/Python Projects/mohcc-ai-tools/mohcc-ai-tools-env/lib/python3.11/site-packages/pydantic/_internal/_generate_schema.py", line 1825, in _apply_annotations
schema = get_inner_schema(source_type)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "/home/devoop/Documents/Python Projects/mohcc-ai-tools/mohcc-ai-tools-env/lib/python3.11/site-packages/pydantic/_internal/_schema_generation_shared.py", line 82, in __call__
schema = self._handler(source_type)
^^^^^^^^^^^^^^^^^^^^^^^^^^
File "/home/devoop/Documents/Python Projects/mohcc-ai-tools/mohcc-ai-tools-env/lib/python3.11/site-packages/pydantic/_internal/_generate_schema.py", line 1806, in inner_handler
schema = self._generate_schema_inner(obj)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "/home/devoop/Documents/Python Projects/mohcc-ai-tools/mohcc-ai-tools-env/lib/python3.11/site-packages/pydantic/_internal/_generate_schema.py", line 758, in _generate_schema_inner
return self.match_type(obj)
^^^^^^^^^^^^^^^^^^^^
File "/home/devoop/Documents/Python Projects/mohcc-ai-tools/mohcc-ai-tools-env/lib/python3.11/site-packages/pydantic/_internal/_generate_schema.py", line 840, in match_type
return self._match_generic_type(obj, origin)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "/home/devoop/Documents/Python Projects/mohcc-ai-tools/mohcc-ai-tools-env/lib/python3.11/site-packages/pydantic/_internal/_generate_schema.py", line 864, in _match_generic_type
return self._union_schema(obj)
^^^^^^^^^^^^^^^^^^^^^^^
File "/home/devoop/Documents/Python Projects/mohcc-ai-tools/mohcc-ai-tools-env/lib/python3.11/site-packages/pydantic/_internal/_generate_schema.py", line 1152, in _union_schema
choices.append(self.generate_schema(arg))
^^^^^^^^^^^^^^^^^^^^^^^^^
File "/home/devoop/Documents/Python Projects/mohcc-ai-tools/mohcc-ai-tools-env/lib/python3.11/site-packages/pydantic/_internal/_generate_schema.py", line 502, in generate_schema
schema = self._generate_schema_inner(obj)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "/home/devoop/Documents/Python Projects/mohcc-ai-tools/mohcc-ai-tools-env/lib/python3.11/site-packages/pydantic/_internal/_generate_schema.py", line 758, in _generate_schema_inner
return self.match_type(obj)
^^^^^^^^^^^^^^^^^^^^
File "/home/devoop/Documents/Python Projects/mohcc-ai-tools/mohcc-ai-tools-env/lib/python3.11/site-packages/pydantic/_internal/_generate_schema.py", line 844, in match_type
return self._unknown_type_schema(obj)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "/home/devoop/Documents/Python Projects/mohcc-ai-tools/mohcc-ai-tools-env/lib/python3.11/site-packages/pydantic/_internal/_generate_schema.py", line 405, in _unknown_type_schema
raise PydanticSchemaGenerationError(
pydantic.errors.PydanticSchemaGenerationError: Unable to generate pydantic-core schema for <class 'pandas.core.frame.DataFrame'>. Set `arbitrary_types_allowed=True` in the model_config to ignore this error or implement `__get_pydantic_core_schema__` on your type to fully support it.
If you got this error by calling handler(<some type>) within `__get_pydantic_core_schema__` then you likely need to call `handler.generate_schema(<some type>)` since we do not call `__get_pydantic_core_schema__` on `<some type>` otherwise to avoid infinite recursion.
For further information visit https://errors.pydantic.dev/2.7/u/schema-for-unknown-type
Share
Improve this question
asked 2 days ago
GeeGee
11 silver badge1 bronze badge
1
- Your code essentially consists only of function definitions; please post a minimal reproducible example. – desertnaut Commented yesterday
1 Answer
Reset to default

Your problem seems related to the fact that Haystack 1.x (`farm-haystack`) is not compatible with `pydantic>=2.0.0`.
Apart from that, Haystack 1.x is in maintenance mode, so it would be better to switch to Haystack 2.x (`haystack-ai`), which is actively developed and maintained.
Some useful docs:
- Haystack 2.x installation guide
- Migration guide
本文标签: nlpPython Farmhaystack DependenciesStack Overflow
版权声明:本文标题:nlp - Python Farm-haystack Dependencies - Stack Overflow 内容由网友自发贡献,该文观点仅代表作者本人, 转载请联系作者并注明出处:http://www.betaflare.com/web/1740073411a2223169.html, 本站仅提供信息存储空间服务,不拥有所有权,不承担相关法律责任。如发现本站有涉嫌抄袭侵权/违法违规的内容,一经查实,本站将立刻删除。
发表评论