Skip to content
Closed
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
38 changes: 38 additions & 0 deletions README.md
Original file line number Diff line number Diff line change
Expand Up @@ -119,4 +119,42 @@ result = desearch.twitter_replies_post(post_id="123456789", count=10, query="Bit
print(result)
```

### 5. Web Crawl

You can also crawl a website by passing its URL:

```python
from desearch_py import Desearch

# Initialize the Desearch client with your API key
desearch = Desearch(api_key="your-api-key")

# Perform a web crawl
result = desearch.web_crawl("https://docs.desearch.ai/docs/desearch-api")

# Print the web content
print(result)
```

### 6. Deep research

```python
from desearch_py import Desearch

# Initialize the Desearch client with your API key
desearch = Desearch(api_key="your-api-key")

# Perform a deep research
result = desearch.deep_research(
prompt="Bittensor",
tools=["web", "hackernews", "reddit", "wikipedia", "youtube", "twitter", "arxiv"],
date_filter="PAST_24_HOURS",
streaming=False,
system_message=""
)

# Print the search results
print(result)
```

These examples demonstrate how to use the Desearch SDK to perform various types of searches. You can customize the queries and parameters to suit your specific needs. Make sure to replace `"your-api-key"` with your actual API key to authenticate your requests.
57 changes: 57 additions & 0 deletions deep_research.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,57 @@
# Import the Desearch library
from desearch_py import Desearch

# Initialize the Desearch client with your API key
# Replace 'your-api-key' with your actual API key
desearch = Desearch(api_key="your-api-key")

"""
This section of the code defines the configuration options for the Deep research using the Desearch client.

Attributes:
    prompt (str): The search query to be used in the Deep research.

    tools (list of str): A list of tools available for conducting the search. These include:
        - "web"
        - "hackernews"
        - "reddit"
        - "wikipedia"
        - "youtube"
        - "twitter"
        - "arxiv"

    date_filter (str): The time range for filtering search results. Options include:
        - "PAST_24_HOURS"
        - "PAST_2_DAYS"
        - "PAST_WEEK"
        - "PAST_2_WEEKS"
        - "PAST_MONTH"
        - "PAST_2_MONTHS"
        - "PAST_YEAR"
        - "PAST_2_YEARS"

    streaming (bool): A flag indicating whether to stream the search results.

    system_message (str): The system message to be used for the Deep research.
"""

# Perform a deep research using the Desearch client
result = desearch.deep_research(
    prompt="Bittensor",  # The search query
    tools=[
        "web",
        "hackernews",
        "reddit",
        "wikipedia",
        "youtube",
        "twitter",
        "arxiv",
    ],  # List of tools to use for the search
    date_filter="PAST_24_HOURS",  # Filter results from the past 24 hours
    streaming=False,  # Whether to stream results
    system_message="",  # Optional system message; empty uses the service default — TODO confirm
)

# Print the search results
print(result)
19 changes: 19 additions & 0 deletions web_crawl.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,19 @@
# Import the Desearch client library
from desearch_py import Desearch

# Create the Desearch client, authenticating with your API key.
# Swap in your real key in place of 'your-api-key'.
desearch = Desearch(api_key="your-api-key")

# Web-crawl configuration:
#   url (str): the address of the website to crawl.

# Crawl the target page and capture its content.
result = desearch.web_crawl("https://docs.desearch.ai/docs/desearch-api")

# Display the crawled web content.
print(result)