Commit

fix: doctests by removing examples and changing models
NiklasKoehneckeAA committed Dec 16, 2024
1 parent f1d6cf6 commit 8a08c2a
Showing 6 changed files with 9 additions and 75 deletions.
30 changes: 0 additions & 30 deletions src/intelligence_layer/connectors/document_index/document_index.py
@@ -438,36 +438,6 @@ class DocumentIndexClient:
  Args:
  token: A valid token for the document index API.
  base_document_index_url: The url of the document index' API.
- Example:
- >>> import os
- >>> from intelligence_layer.connectors import (
- ... CollectionPath,
- ... DocumentContents,
- ... DocumentIndexClient,
- ... DocumentPath,
- ... SearchQuery,
- ... )
- >>> document_index = DocumentIndexClient(os.getenv("AA_TOKEN"))
- >>> collection_path = CollectionPath(
- ... namespace="aleph-alpha", collection="wikipedia-de"
- ... )
- >>> document_index.create_collection(collection_path)
- >>> document_index.add_document(
- ... document_path=DocumentPath(
- ... collection_path=collection_path, document_name="Fun facts about Germany"
- ... ),
- ... contents=DocumentContents.from_text("Germany is a country located in ..."),
- ... )
- >>> search_result = document_index.search(
- ... collection_path=collection_path,
- ... index_name="asymmetric",
- ... search_query=SearchQuery(
- ... query="What is the capital of Germany", max_results=4, min_score=0.5
- ... ),
- ... )
  """

  def __init__(
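
The removed docstring example still documents the intended end-to-end use of DocumentIndexClient. As a rough standalone sketch (assuming a valid AA_TOKEN and network access to a Document Index deployment, which is presumably why it could not run as an offline doctest):

import os

from intelligence_layer.connectors import (
    CollectionPath,
    DocumentContents,
    DocumentIndexClient,
    DocumentPath,
    SearchQuery,
)

# Needs a real token and a reachable Document Index API.
document_index = DocumentIndexClient(os.getenv("AA_TOKEN"))
collection_path = CollectionPath(namespace="aleph-alpha", collection="wikipedia-de")

# Create a collection, add a document, then run an asymmetric search against it.
document_index.create_collection(collection_path)
document_index.add_document(
    document_path=DocumentPath(
        collection_path=collection_path, document_name="Fun facts about Germany"
    ),
    contents=DocumentContents.from_text("Germany is a country located in ..."),
)
search_result = document_index.search(
    collection_path=collection_path,
    index_name="asymmetric",
    search_query=SearchQuery(
        query="What is the capital of Germany", max_results=4, min_score=0.5
    ),
)
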
6 changes: 3 additions & 3 deletions src/intelligence_layer/examples/qa/long_context_qa.py
@@ -55,11 +55,11 @@ class LongContextQa(Task[LongContextQaInput, MultipleChunkQaOutput]):
  model: The model used in the task.
  Example:
- >>> from intelligence_layer.core import InMemoryTracer
+ >>> from intelligence_layer.core import InMemoryTracer, LuminousControlModel
  >>> from intelligence_layer.examples import LongContextQa, LongContextQaInput
- >>> task = LongContextQa()
+ >>> model = LuminousControlModel("luminous-base-control")
+ >>> task = LongContextQa(model=model)
  >>> input = LongContextQaInput(text="Lengthy text goes here...",
  ... question="Where does the text go?")
  >>> tracer = InMemoryTracer()
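
Put together from the context and added lines of this hunk, the updated example constructs the model explicitly and passes it to the task. A minimal sketch of the full flow (the closing run call lies outside the visible hunk and is assumed here):

from intelligence_layer.core import InMemoryTracer, LuminousControlModel
from intelligence_layer.examples import LongContextQa, LongContextQaInput

# Explicit model selection instead of relying on the task's default.
model = LuminousControlModel("luminous-base-control")
task = LongContextQa(model=model)
input = LongContextQaInput(
    text="Lengthy text goes here...", question="Where does the text go?"
)
tracer = InMemoryTracer()
output = task.run(input, tracer)  # assumed final step, not shown in the truncated hunk
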
6 changes: 3 additions & 3 deletions src/intelligence_layer/examples/qa/multiple_chunk_qa.py
@@ -141,15 +141,15 @@ class MultipleChunkQa(Task[MultipleChunkQaInput, MultipleChunkQaOutput]):
  >>> from intelligence_layer.connectors import (
  ... LimitedConcurrencyClient,
  ... )
- >>> from intelligence_layer.core import Language, InMemoryTracer
+ >>> from intelligence_layer.core import Language, InMemoryTracer, LuminousControlModel
  >>> from intelligence_layer.core.chunk import TextChunk
  >>> from intelligence_layer.examples import (
  ... MultipleChunkQa,
  ... MultipleChunkQaInput,
  ... )
- >>> task = MultipleChunkQa()
+ >>> model = LuminousControlModel("luminous-base-control")
+ >>> task = MultipleChunkQa(merge_answers_model=model)
  >>> input = MultipleChunkQaInput(
  ... chunks=[TextChunk("Tina does not like pizza."), TextChunk("Mike is a big fan of pizza.")],
  ... question="Who likes pizza?",
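
The same pattern applies to MultipleChunkQa, except the explicit model is passed as merge_answers_model. A sketch assembled from the visible lines (imports whose use falls outside the hunk are omitted, and the closing run call is assumed):

from intelligence_layer.core import InMemoryTracer, LuminousControlModel
from intelligence_layer.core.chunk import TextChunk
from intelligence_layer.examples import MultipleChunkQa, MultipleChunkQaInput

# Passed as the model that merges the per-chunk answers (per the parameter name).
model = LuminousControlModel("luminous-base-control")
task = MultipleChunkQa(merge_answers_model=model)
input = MultipleChunkQaInput(
    chunks=[
        TextChunk("Tina does not like pizza."),
        TextChunk("Mike is a big fan of pizza."),
    ],
    question="Who likes pizza?",
)
tracer = InMemoryTracer()  # assumed; the tail of the doctest is not shown
output = task.run(input, tracer)
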
16 changes: 0 additions & 16 deletions src/intelligence_layer/examples/qa/retriever_based_qa.py
@@ -71,22 +71,6 @@ class RetrieverBasedQa(
  retriever: Used to access and return a set of texts.
  multi_chunk_qa: The task that is used to generate an answer for a single chunk (retrieved through
  the retriever). Defaults to :class:`MultipleChunkQa` .
- Example:
- >>> import os
- >>> from intelligence_layer.connectors import DocumentIndexClient
- >>> from intelligence_layer.connectors import DocumentIndexRetriever
- >>> from intelligence_layer.core import InMemoryTracer
- >>> from intelligence_layer.examples import RetrieverBasedQa, RetrieverBasedQaInput
- >>> token = os.getenv("AA_TOKEN")
- >>> document_index = DocumentIndexClient(token)
- >>> retriever = DocumentIndexRetriever(document_index, "asymmetric", "aleph-alpha", "wikipedia-de", 3)
- >>> task = RetrieverBasedQa(retriever)
- >>> input_data = RetrieverBasedQaInput(question="When was Rome founded?")
- >>> tracer = InMemoryTracer()
- >>> output = task.run(input_data, tracer)
  """

  def __init__(
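
For reference, the removed RetrieverBasedQa example as a standalone sketch (it requires an AA_TOKEN and an already populated "wikipedia-de" collection, which presumably made it unsuitable as a doctest):

import os

from intelligence_layer.connectors import DocumentIndexClient, DocumentIndexRetriever
from intelligence_layer.core import InMemoryTracer
from intelligence_layer.examples import RetrieverBasedQa, RetrieverBasedQaInput

# Retrieve candidate chunks from the Document Index, then answer over them.
token = os.getenv("AA_TOKEN")
document_index = DocumentIndexClient(token)
retriever = DocumentIndexRetriever(
    document_index, "asymmetric", "aleph-alpha", "wikipedia-de", 3
)
task = RetrieverBasedQa(retriever)
input_data = RetrieverBasedQaInput(question="When was Rome founded?")
tracer = InMemoryTracer()
output = task.run(input_data, tracer)
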
7 changes: 3 additions & 4 deletions src/intelligence_layer/examples/qa/single_chunk_qa.py
@@ -104,11 +104,10 @@ class SingleChunkQa(Task[SingleChunkQaInput, SingleChunkQaOutput]):
  Example:
- >>> import os
- >>> from intelligence_layer.core import Language, InMemoryTracer
- >>> from intelligence_layer.core import TextChunk
+ >>> from intelligence_layer.core import Language, InMemoryTracer, TextChunk, LuminousControlModel
  >>> from intelligence_layer.examples import SingleChunkQa, SingleChunkQaInput
  >>>
- >>> task = SingleChunkQa()
+ >>> model = LuminousControlModel("luminous-base-control")
+ >>> task = SingleChunkQa(model=model)
  >>> input = SingleChunkQaInput(
  ... chunk=TextChunk("Tina does not like pizza. However, Mike does."),
  ... question="Who likes pizza?",
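
A sketch of the updated SingleChunkQa usage assembled from this hunk (the Language import and the rest of the doctest fall outside the visible lines and are omitted or assumed):

from intelligence_layer.core import InMemoryTracer, LuminousControlModel, TextChunk
from intelligence_layer.examples import SingleChunkQa, SingleChunkQaInput

# The task now receives an explicitly constructed control model.
model = LuminousControlModel("luminous-base-control")
task = SingleChunkQa(model=model)
input = SingleChunkQaInput(
    chunk=TextChunk("Tina does not like pizza. However, Mike does."),
    question="Who likes pizza?",
)
tracer = InMemoryTracer()  # assumed; not shown in the truncated hunk
output = task.run(input, tracer)
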
19 changes: 0 additions & 19 deletions src/intelligence_layer/examples/search/search.py
@@ -46,25 +46,6 @@ class Search(Generic[ID], Task[SearchInput, SearchOutput[ID]]):
  Args:
  retriever: Implements logic to retrieve matching texts to the query.
- Example:
- >>> from os import getenv
- >>> from intelligence_layer.connectors import (
- ... DocumentIndexClient,
- ... )
- >>> from intelligence_layer.connectors import (
- ... DocumentIndexRetriever,
- ... )
- >>> from intelligence_layer.core import InMemoryTracer
- >>> from intelligence_layer.examples import Search, SearchInput
- >>> document_index = DocumentIndexClient(getenv("AA_TOKEN"))
- >>> retriever = DocumentIndexRetriever(document_index, "asymmetric", "aleph-alpha", "wikipedia-de", 3)
- >>> task = Search(retriever)
- >>> input = SearchInput(query="When did East and West Germany reunite?")
- >>> tracer = InMemoryTracer()
- >>> output = task.run(input, tracer)
  """

  def __init__(self, retriever: BaseRetriever[ID]):
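
For reference, the removed Search example as a standalone sketch (like the other retriever-backed examples, it needs an AA_TOKEN and an existing "wikipedia-de" collection, which presumably made it unsuitable as a doctest):

from os import getenv

from intelligence_layer.connectors import DocumentIndexClient, DocumentIndexRetriever
from intelligence_layer.core import InMemoryTracer
from intelligence_layer.examples import Search, SearchInput

# Wrap a Document Index retriever in the generic Search task.
document_index = DocumentIndexClient(getenv("AA_TOKEN"))
retriever = DocumentIndexRetriever(
    document_index, "asymmetric", "aleph-alpha", "wikipedia-de", 3
)
task = Search(retriever)
input = SearchInput(query="When did East and West Germany reunite?")
tracer = InMemoryTracer()
output = task.run(input, tracer)
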
