refactor!: change import paths #253

Merged: 1 commit, Jan 22, 2024

One file in this PR was deleted (its name is not preserved in this view).

integrations/instructor_embedders/pyproject.toml (9 changes: 6 additions & 3 deletions)
@@ -54,6 +54,9 @@ Documentation = "https://github.com/deepset-ai/haystack-core-integrations/tree/m
Issues = "https://github.com/deepset-ai/haystack-core-integrations/issues"
Source = "https://github.com/deepset-ai/haystack-core-integrations/tree/main/integrations/instructor_embedders"

+[tool.hatch.build.targets.wheel]
+packages = ["src/haystack_integrations"]

[tool.hatch.version]
source = "vcs"
tag-pattern = 'integrations\/instructor_embedders-v(?P<version>.*)'
@@ -81,7 +84,7 @@ dependencies = [
"ruff>=0.0.243",
]
[tool.hatch.envs.lint.scripts]
-typing = "mypy --install-types --non-interactive {args:instructor_embedders_haystack tests}"
+typing = "mypy --install-types --non-interactive --explicit-package-bases {args:src/ tests}"
style = [
"ruff {args:.}",
"black --check --diff {args:.}",
@@ -99,7 +102,6 @@ all = [
[tool.coverage.run]
branch = true
parallel = true
-omit = ["instructor_embedders/__about__.py"]

[tool.coverage.report]
exclude_lines = ["no cov", "if __name__ == .__main__.:", "if TYPE_CHECKING:"]
@@ -152,7 +154,7 @@ unfixable = [
known-first-party = ["instructor_embedders"]

[tool.ruff.flake8-tidy-imports]
-ban-relative-imports = "all"
+ban-relative-imports = "parents"

[tool.ruff.per-file-ignores]
# Tests can use magic values, assertions, and relative imports
@@ -172,6 +174,7 @@ module = [
"instructor_embedders_haystack.*",
"InstructorEmbedding.*",
"haystack.*",
"haystack_integrations.*",
"pytest.*",
"numpy.*",
]
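The remaining hunks in this PR come from files moved under the new src/ layout wired up by the [tool.hatch.build.targets.wheel] section above. The file names are not preserved in this view; the tree below is a sketch inferred from the imports and @patch targets in the diff, not an authoritative listing:

src/
└── haystack_integrations/
    └── components/
        └── embedders/
            └── instructor_embedders/
                ├── __init__.py
                ├── instructor_document_embedder.py
                ├── instructor_text_embedder.py
                └── embedding_backend/
                    └── instructor_backend.py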
@@ -0,0 +1,7 @@
# SPDX-FileCopyrightText: 2023-present deepset GmbH <[email protected]>
#
# SPDX-License-Identifier: Apache-2.0
from .instructor_document_embedder import InstructorDocumentEmbedder
from .instructor_text_embedder import InstructorTextEmbedder

__all__ = ["InstructorDocumentEmbedder", "InstructorTextEmbedder"]
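For reference, a minimal usage sketch of the import path change this PR makes. The old path is shown commented out for comparison; the run() call and its return shape are assumed from the standard Haystack 2.x text-embedder interface rather than taken from this diff:

# Old import path removed by this PR:
# from instructor_embedders_haystack.instructor_text_embedder import InstructorTextEmbedder

# New import path, re-exported by the __init__.py shown above:
from haystack_integrations.components.embedders.instructor_embedders import InstructorTextEmbedder

embedder = InstructorTextEmbedder(model="hkunlp/instructor-base")
embedder.warm_up()  # loads the INSTRUCTOR backend, as exercised by the warm-up tests below
result = embedder.run(text="A sample sentence to embed.")  # assumed signature; returns a dict with an "embedding" list
print(len(result["embedding"]))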
@@ -5,7 +5,7 @@

from haystack import Document, component, default_from_dict, default_to_dict

-from instructor_embedders_haystack.embedding_backend.instructor_backend import _InstructorEmbeddingBackendFactory
+from .embedding_backend.instructor_backend import _InstructorEmbeddingBackendFactory


@component
@@ -5,7 +5,7 @@

from haystack import component, default_from_dict, default_to_dict

-from instructor_embedders_haystack.embedding_backend.instructor_backend import _InstructorEmbeddingBackendFactory
+from .embedding_backend.instructor_backend import _InstructorEmbeddingBackendFactory


@component
@@ -1,9 +1,13 @@
from unittest.mock import patch

-from instructor_embedders_haystack.embedding_backend.instructor_backend import _InstructorEmbeddingBackendFactory
+from haystack_integrations.components.embedders.instructor_embedders.embedding_backend.instructor_backend import (
+    _InstructorEmbeddingBackendFactory,
+)


-@patch("instructor_embedders_haystack.embedding_backend.instructor_backend.INSTRUCTOR")
+@patch(
+    "haystack_integrations.components.embedders.instructor_embedders.embedding_backend.instructor_backend.INSTRUCTOR"
+)
def test_factory_behavior(mock_instructor): # noqa: ARG001
embedding_backend = _InstructorEmbeddingBackendFactory.get_embedding_backend(
model_name_or_path="hkunlp/instructor-large", device="cpu"
@@ -20,7 +24,9 @@ def test_factory_behavior(mock_instructor): # noqa: ARG001
_InstructorEmbeddingBackendFactory._instances = {}


-@patch("instructor_embedders_haystack.embedding_backend.instructor_backend.INSTRUCTOR")
+@patch(
+    "haystack_integrations.components.embedders.instructor_embedders.embedding_backend.instructor_backend.INSTRUCTOR"
+)
def test_model_initialization(mock_instructor):
_InstructorEmbeddingBackendFactory.get_embedding_backend(
model_name_or_path="hkunlp/instructor-base", device="cpu", use_auth_token="huggingface_auth_token"
@@ -32,7 +38,9 @@ def test_model_initialization(mock_instructor):
_InstructorEmbeddingBackendFactory._instances = {}


-@patch("instructor_embedders_haystack.embedding_backend.instructor_backend.INSTRUCTOR")
+@patch(
+    "haystack_integrations.components.embedders.instructor_embedders.embedding_backend.instructor_backend.INSTRUCTOR"
+)
def test_embedding_function_with_kwargs(mock_instructor): # noqa: ARG001
embedding_backend = _InstructorEmbeddingBackendFactory.get_embedding_backend(
model_name_or_path="hkunlp/instructor-base"
@@ -3,8 +3,7 @@
import numpy as np
import pytest
from haystack import Document

-from instructor_embedders_haystack.instructor_document_embedder import InstructorDocumentEmbedder
+from haystack_integrations.components.embedders.instructor_embedders import InstructorDocumentEmbedder


class TestInstructorDocumentEmbedder:
@@ -55,7 +54,7 @@ def test_to_dict(self):
embedder = InstructorDocumentEmbedder(model="hkunlp/instructor-base")
embedder_dict = embedder.to_dict()
assert embedder_dict == {
"type": "instructor_embedders_haystack.instructor_document_embedder.InstructorDocumentEmbedder",
"type": "haystack_integrations.components.embedders.instructor_embedders.instructor_document_embedder.InstructorDocumentEmbedder", # noqa
"init_parameters": {
"model": "hkunlp/instructor-base",
"device": "cpu",
@@ -86,7 +85,7 @@ def test_to_dict_with_custom_init_parameters(self):
)
embedder_dict = embedder.to_dict()
assert embedder_dict == {
"type": "instructor_embedders_haystack.instructor_document_embedder.InstructorDocumentEmbedder",
"type": "haystack_integrations.components.embedders.instructor_embedders.instructor_document_embedder.InstructorDocumentEmbedder", # noqa
"init_parameters": {
"model": "hkunlp/instructor-base",
"device": "cuda",
@@ -105,7 +104,7 @@ def test_from_dict(self):
Test deserialization of InstructorDocumentEmbedder from a dictionary, using default initialization parameters.
"""
embedder_dict = {
"type": "instructor_embedders_haystack.instructor_document_embedder.InstructorDocumentEmbedder",
"type": "haystack_integrations.components.embedders.instructor_embedders.instructor_document_embedder.InstructorDocumentEmbedder", # noqa
"init_parameters": {
"model": "hkunlp/instructor-base",
"device": "cpu",
@@ -134,7 +133,7 @@ def test_from_dict_with_custom_init_parameters(self):
Test deserialization of InstructorDocumentEmbedder from a dictionary, using custom initialization parameters.
"""
embedder_dict = {
"type": "instructor_embedders_haystack.instructor_document_embedder.InstructorDocumentEmbedder",
"type": "haystack_integrations.components.embedders.instructor_embedders.instructor_document_embedder.InstructorDocumentEmbedder", # noqa
"init_parameters": {
"model": "hkunlp/instructor-base",
"device": "cuda",
@@ -158,7 +157,9 @@ def test_from_dict_with_custom_init_parameters(self):
assert embedder.meta_fields_to_embed == ["test_field"]
assert embedder.embedding_separator == " | "

-@patch("instructor_embedders_haystack.instructor_document_embedder._InstructorEmbeddingBackendFactory")
+@patch(
+    "haystack_integrations.components.embedders.instructor_embedders.instructor_document_embedder._InstructorEmbeddingBackendFactory"
+)
def test_warmup(self, mocked_factory):
"""
Test for checking embedder instances after warm-up.
@@ -170,7 +171,9 @@ def test_warmup(self, mocked_factory):
model_name_or_path="hkunlp/instructor-base", device="cpu", use_auth_token=None
)

-@patch("instructor_embedders_haystack.instructor_document_embedder._InstructorEmbeddingBackendFactory")
+@patch(
+    "haystack_integrations.components.embedders.instructor_embedders.instructor_document_embedder._InstructorEmbeddingBackendFactory"
+)
def test_warmup_does_not_reload(self, mocked_factory):
"""
Test for checking backend instances after multiple warm-ups.
@@ -2,8 +2,7 @@

import numpy as np
import pytest

-from instructor_embedders_haystack.instructor_text_embedder import InstructorTextEmbedder
+from haystack_integrations.components.embedders.instructor_embedders import InstructorTextEmbedder


class TestInstructorTextEmbedder:
@@ -48,7 +47,7 @@ def test_to_dict(self):
embedder = InstructorTextEmbedder(model="hkunlp/instructor-base")
embedder_dict = embedder.to_dict()
assert embedder_dict == {
"type": "instructor_embedders_haystack.instructor_text_embedder.InstructorTextEmbedder",
"type": "haystack_integrations.components.embedders.instructor_embedders.instructor_text_embedder.InstructorTextEmbedder", # noqa
"init_parameters": {
"model": "hkunlp/instructor-base",
"device": "cpu",
@@ -75,7 +74,7 @@ def test_to_dict_with_custom_init_parameters(self):
)
embedder_dict = embedder.to_dict()
assert embedder_dict == {
"type": "instructor_embedders_haystack.instructor_text_embedder.InstructorTextEmbedder",
"type": "haystack_integrations.components.embedders.instructor_embedders.instructor_text_embedder.InstructorTextEmbedder", # noqa
"init_parameters": {
"model": "hkunlp/instructor-base",
"device": "cuda",
@@ -92,7 +91,7 @@ def test_from_dict(self):
Test deserialization of InstructorTextEmbedder from a dictionary, using default initialization parameters.
"""
embedder_dict = {
"type": "instructor_embedders_haystack.instructor_text_embedder.InstructorTextEmbedder",
"type": "haystack_integrations.components.embedders.instructor_embedders.instructor_text_embedder.InstructorTextEmbedder", # noqa
"init_parameters": {
"model": "hkunlp/instructor-base",
"device": "cpu",
@@ -117,7 +116,7 @@ def test_from_dict_with_custom_init_parameters(self):
Test deserialization of InstructorTextEmbedder from a dictionary, using custom initialization parameters.
"""
embedder_dict = {
"type": "instructor_embedders_haystack.instructor_text_embedder.InstructorTextEmbedder",
"type": "haystack_integrations.components.embedders.instructor_embedders.instructor_text_embedder.InstructorTextEmbedder", # noqa
"init_parameters": {
"model": "hkunlp/instructor-base",
"device": "cuda",
@@ -137,7 +136,9 @@ def test_from_dict_with_custom_init_parameters(self):
assert embedder.progress_bar is False
assert embedder.normalize_embeddings is True

-@patch("instructor_embedders_haystack.instructor_text_embedder._InstructorEmbeddingBackendFactory")
+@patch(
+    "haystack_integrations.components.embedders.instructor_embedders.instructor_text_embedder._InstructorEmbeddingBackendFactory"
+)
def test_warmup(self, mocked_factory):
"""
Test for checking embedder instances after warm-up.
@@ -149,7 +150,9 @@ def test_warmup(self, mocked_factory):
model_name_or_path="hkunlp/instructor-base", device="cpu", use_auth_token=None
)

-@patch("instructor_embedders_haystack.instructor_text_embedder._InstructorEmbeddingBackendFactory")
+@patch(
+    "haystack_integrations.components.embedders.instructor_embedders.instructor_text_embedder._InstructorEmbeddingBackendFactory"
+)
def test_warmup_does_not_reload(self, mocked_factory):
"""
Test for checking backend instances after multiple warm-ups.