
Commit 2ef7523

Merge remote-tracking branch 'origin/package_refactoring' into package_refactoring

rotemweiss57 committed Nov 13, 2023
2 parents 09a09cb + ef794cf
Showing 11 changed files with 14 additions and 12 deletions.
3 files renamed without changes.
```diff
@@ -2,12 +2,12 @@
 import os, sys
 sys.path.insert(0, os.path.abspath(os.path.join(os.path.dirname(__file__), '../..')))
 
-from examples.permchain_example.researcher import Researcher
-from examples.permchain_example.editor_actors.editor import EditorActor
-from examples.permchain_example.reviser_actors.reviser import ReviserActor
-from examples.permchain_example.search_actors.gpt_researcher import GPTResearcherActor
-from examples.permchain_example.writer_actors.writer import WriterActor
-from examples.permchain_example.research_team import ResearchTeam
+from examples.permchain_agents.researcher import Researcher
+from examples.permchain_agents.editor_actors.editor import EditorActor
+from examples.permchain_agents.reviser_actors.reviser import ReviserActor
+from examples.permchain_agents.search_actors.gpt_researcher import GPTResearcherActor
+from examples.permchain_agents.writer_actors.writer import WriterActor
+from examples.permchain_agents.research_team import ResearchTeam
 from scraping.processing.text import md_to_pdf
```
gpt_researcher/master/agent.py (4 changes: 2 additions & 2 deletions)
@@ -87,9 +87,9 @@ async def run_sub_query(self, sub_query):

```python
        new_search_urls = await self.get_new_urls([url.get("href") for url in search_results])

        # Scrape Urls
        await stream_output("logs", f"📝 Summarizing sources...\n", self.websocket)
        # await stream_output("logs", f"📝Scraping urls {new_search_urls}...\n", self.websocket)
        content = scrape_urls(new_search_urls, self.cfg)

        await stream_output("logs", f"🤔Researching for relevant information...\n", self.websocket)
        # Summarize Raw Data
        summary = await summarize(query=sub_query, content=content, agent_role_prompt=self.role, cfg=self.cfg, websocket=self.websocket)
```
gpt_researcher/master/functions.py (10 changes: 6 additions & 4 deletions)
```diff
@@ -220,7 +220,7 @@ async def generate_report(query, context, agent_role_prompt, report_type, websoc
     return report
 
 
-async def stream_output(type, output, websocket=None):
+async def stream_output(type, output, websocket=None, logging=True):
     """
     Streams output to the websocket
     Args:
@@ -230,6 +230,8 @@ async def stream_output(type, output, websocket=None):
     Returns:
         None
     """
-    if not websocket:
-        return print(output)
-    await websocket.send_json({"type": type, "output": output})
+    if not websocket or logging:
+        print(output)
+
+    if websocket:
+        await websocket.send_json({"type": type, "output": output})
```