#!/usr/bin/env python
"""
Test Query to Report Script with Electric Vehicles Query

This script tests the query_to_report.py script with a query about the impact of electric vehicles.
"""

import os
import sys
import asyncio
import argparse
from datetime import datetime

# Add parent directory to path to import modules
sys.path.append(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))

from scripts.query_to_report import query_to_report
async def run_ev_test(use_mock: bool = False):
    """
    Run a test of the query to report workflow with an electric vehicles query.

    Generates a timestamped Markdown report file in the current directory and
    prints a short preview of its contents.

    Args:
        use_mock: If True, use mock data instead of making actual API calls
    """
    # Query about electric vehicles
    query = "What is the environmental and economic impact of electric vehicles compared to traditional vehicles?"

    # Generate timestamp for unique output file so repeated runs don't clobber
    # earlier reports
    timestamp = datetime.now().strftime("%Y%m%d_%H%M%S")
    output_file = f"ev_report_{timestamp}.md"

    print(f"Processing query: {query}")
    print("This may take a few minutes depending on the number of search results and API response times...")

    # Run the workflow
    await query_to_report(
        query=query,
        output_file=output_file,
        num_results=7,  # Get a good number of results for a comprehensive report
        use_mock=use_mock
    )

    print("\nTest completed successfully!")
    print(f"Report saved to: {output_file}")

    # Print the first few lines of the report. This is best-effort: a failure
    # to preview should not crash the test run, but we only swallow the
    # file-access/decoding errors we expect, not every exception.
    try:
        with open(output_file, 'r', encoding='utf-8') as f:
            preview = f.read(1000)  # Show a larger preview
        print("\nReport Preview:")
        print("-" * 80)
        print(preview + "...")
        print("-" * 80)
    except (OSError, UnicodeDecodeError) as e:
        print(f"Error reading report: {e}")
def main():
    """Parse command-line arguments and launch the EV report test."""
    arg_parser = argparse.ArgumentParser(
        description='Test the query to report workflow with EV query'
    )
    arg_parser.add_argument(
        '--use-mock', '-m',
        action='store_true',
        help='Use mock data instead of API calls',
    )
    parsed = arg_parser.parse_args()

    # Bridge from this synchronous entry point into the async workflow.
    asyncio.run(run_ev_test(use_mock=parsed.use_mock))
# Entry point: run the test only when executed as a script, not when imported.
if __name__ == "__main__":
    main()