docs: backticks everywhere
wochinge committed Mar 5, 2024
1 parent cb28fee commit 7625c8c
Showing 10 changed files with 16 additions and 16 deletions.
@@ -52,7 +52,7 @@ def run(self, query_embedding: List[float], filters: Optional[Dict[str, Any]] =
:param filters: filters to narrow down the search space.
:param top_k: the maximum number of documents to retrieve.
:returns: a dictionary with the following keys:
- - documents: A list of documents retrieved from the AstraDocumentStore.
+ - `documents`: A list of documents retrieved from the AstraDocumentStore.
"""

if not top_k:
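For orientation, a minimal sketch of how the return value documented above might be consumed; `retriever` stands for an already-constructed Astra embedding retriever (construction and the embedding dimension are illustrative, not part of this commit):

# Hypothetical usage of the retriever documented above; construction is omitted.
query_embedding = [0.1] * 768  # illustrative dimension; it must match your embedder

result = retriever.run(query_embedding=query_embedding, top_k=5)
for doc in result["documents"]:  # the `documents` key described in the docstring
    print(doc.content)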
@@ -65,7 +65,7 @@ def run(
:param top_k: The maximum number of documents to retrieve.
If not specified, the default value from the constructor is used.
:return: A dictionary with the following keys:
- "documents": List of documents returned by the search engine.
- `documents`: List of documents returned by the search engine.
:raises ValueError: If the specified document store is not found or is not a MemoryDocumentStore instance.
"""
@@ -120,7 +120,7 @@ def run(
:param query_embedding: the query embeddings.
:return: a dictionary with the following keys:
- "documents": List of documents returned by the search engine.
- `documents`: List of documents returned by the search engine.
"""
top_k = top_k or self.top_k

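Along the same lines, a hedged sketch for the embedding-based run() above (the class name is not visible in this hunk, so `retriever` is a stand-in; the vector dimension is illustrative):

# Hypothetical call; per the code shown above, top_k falls back to the constructor default when omitted.
result = retriever.run(query_embedding=[0.2] * 384)
print(f"retrieved {len(result['documents'])} documents")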
@@ -112,8 +112,8 @@ def run(self, text: str):
:param text: the text to embed.
:returns: A dictionary with the following keys:
- "embedding": the embedding of the text.
- "meta": metadata about the request.
- `embedding`: the embedding of the text.
- `meta`: metadata about the request.
:raises TypeError: If the input is not a string.
"""
if not isinstance(text, str):
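A similar sketch for the text embedder above (`embedder` is a stand-in for a constructed instance; the class name is not shown in this hunk):

# Hypothetical usage; per the docstring, a non-string input raises TypeError.
result = embedder.run(text="What is the capital of France?")
print(len(result["embedding"]))  # the embedding vector
print(result["meta"])            # metadata about the request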
@@ -149,7 +149,7 @@ def run(self, messages: List[ChatMessage], generation_kwargs: Optional[Dict[str,
For more details on the parameters supported by the Cohere API, refer to the
Cohere [documentation](https://docs.cohere.com/reference/chat).
:returns: A dictionary with the following keys:
- "replies": a list of `ChatMessage` instances representing the generated responses.
- `replies`: a list of `ChatMessage` instances representing the generated responses.
"""
# update generation kwargs by merging with the generation kwargs passed to the run method
generation_kwargs = {**self.generation_kwargs, **(generation_kwargs or {})}
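A hedged sketch of calling the chat generator documented above (assuming it is the Cohere chat generator this file appears to define; `chat_generator` and the generation kwargs are illustrative):

from haystack.dataclasses import ChatMessage

# Hypothetical usage; construction and API-key handling are omitted.
messages = [ChatMessage.from_user("Summarize retrieval-augmented generation in one sentence.")]
result = chat_generator.run(messages=messages, generation_kwargs={"temperature": 0.3})
print(result["replies"][0])  # a ChatMessage instance, as documented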
@@ -127,8 +127,8 @@ def run(self, prompt: str):
:param prompt: the prompt to be sent to the generative model.
:returns: A dictionary with the following keys:
- "replies": the list of replies generated by the model.
- "meta": metadata about the request.
- `replies`: the list of replies generated by the model.
- `meta`: metadata about the request.
"""
response = self.client.generate(
model=self.model, prompt=prompt, stream=self.streaming_callback is not None, **self.model_parameters
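A sketch for the prompt-based generator above (`generator` is a stand-in; with no streaming_callback configured, the client is called with stream=False, as the hunk shows):

# Hypothetical usage of the generator documented above.
result = generator.run(prompt="Write one sentence about vector databases.")
print(result["replies"][0])  # first generated reply
print(result["meta"])        # metadata about the request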
@@ -85,7 +85,7 @@ def run(self, prompt: str, negative_prompt: Optional[str] = None):
:param negative_prompt: A description of what you want to omit in
the generated images.
:returns: A dictionary with the following keys:
- - images: A list of ByteStream objects, each containing an image.
+ - `images`: A list of ByteStream objects, each containing an image.
"""
negative_prompt = negative_prompt or self._kwargs.get("negative_prompt")
res = self._model.generate_images(prompt=prompt, negative_prompt=negative_prompt, **self._kwargs)
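A hedged sketch for the image generator above (assuming the Vertex AI image generation component this file appears to define; prompts and file names are illustrative):

# Hypothetical usage; each item in `images` is a ByteStream holding raw image bytes.
result = image_generator.run(prompt="A lighthouse at dawn", negative_prompt="text, watermark")
for i, image in enumerate(result["images"]):
    with open(f"image_{i}.png", "wb") as f:
        f.write(image.data)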
@@ -89,7 +89,7 @@ def run(self, image: ByteStream, question: str):
:param image: The image to ask the question about.
:param question: The question to ask.
:returns: A dictionary with the following keys:
- - answers: A list of answers to the question.
+ - `answers`: A list of answers to the question.
"""
answers = self._model.ask_question(image=Image(image.data), question=question, **self._kwargs)
return {"answers": answers}
@@ -115,10 +115,10 @@ def run(self, prompt: str):
:param prompt: The prompt to use for text generation.
:returns: A dictionary with the following keys:
- - answers: A list of generated answers.
- - safety_attributes: A dictionary with the [safety scores](https://cloud.google.com/vertex-ai/generative-ai/docs/learn/responsible-ai#safety_attribute_descriptions)
+ - `answers`: A list of generated answers.
+ - `safety_attributes`: A dictionary with the [safety scores](https://cloud.google.com/vertex-ai/generative-ai/docs/learn/responsible-ai#safety_attribute_descriptions)
of each answer.
- - citations: A list of citations for each answer.
+ - `citations`: A list of citations for each answer.
"""
res = self._model.predict(prompt=prompt, **self._kwargs)

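A sketch touching all three documented keys above (assuming the Vertex AI text generator component; `text_generator` and the prompt are illustrative):

# Hypothetical usage; answers and citations are documented as parallel per-answer lists.
result = text_generator.run(prompt="Explain zero-shot classification in two sentences.")
for answer, citations in zip(result["answers"], result["citations"]):
    print(answer, citations)
print(result["safety_attributes"])  # per-answer safety scores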
@@ -79,8 +79,8 @@ def run(self, prompt: str, generation_kwargs: Optional[Dict[str, Any]] = None):
For more information on the available kwargs, see
[llama.cpp documentation](https://llama-cpp-python.readthedocs.io/en/latest/api-reference/#llama_cpp.Llama.create_completion).
:returns: A dictionary with the following keys:
- "replies": the list of replies generated by the model.
- "meta": metadata about the request.
- `replies`: the list of replies generated by the model.
- `meta`: metadata about the request.
"""
if self.model is None:
error_msg = "The model has not been loaded. Please call warm_up() before running."
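A hedged sketch for the llama.cpp generator above (model construction is omitted; the generation kwargs are illustrative and follow the llama-cpp-python create_completion parameters linked in the docstring):

# Hypothetical usage; warm_up() must be called first, as the error message in the hunk states.
generator.warm_up()
result = generator.run(
    prompt="Q: What is vector search? A:",
    generation_kwargs={"max_tokens": 128, "temperature": 0.2},
)
print(result["replies"][0])
print(result["meta"])  # metadata about the request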
@@ -139,7 +139,7 @@ def run(
(same metadata for all files).
:returns: A dictionary with the following key:
- "documents": List of Haystack Documents.
- `documents`: List of Haystack Documents.
:raises ValueError: If `meta` is a list and `paths` contains directories.
"""
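Finally, a sketch of the converter contract documented above (`converter` stands in for whichever file converter this file defines; paths and metadata are illustrative):

# Hypothetical usage; a dict `meta` applies the same metadata to all files,
# and per the docstring a list `meta` may not be combined with directory paths.
result = converter.run(paths=["report.pdf", "notes.md"], meta={"source": "docs"})
for doc in result["documents"]:
    print(doc.meta, doc.content[:80])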
