
Add a script for performance testing

Daoud Clarke, 4 years ago
commit 634e490cff
2 changed files with 34 additions and 5 deletions
  1. make_curl.py  +29 -0
  2. performance.py  +5 -5

make_curl.py  +29 -0

@@ -0,0 +1,29 @@
+"""
+Make a curl script for testing performance
+"""
+import os
+from itertools import islice
+from urllib.parse import quote
+
+from paths import DATA_DIR
+from wiki import get_wiki_titles_and_urls
+
+URL_TEMPLATE = "http://localhost:8000/complete?q={}"
+CURL_FILE = os.path.join(DATA_DIR, "urls.curl")
+
+
+def get_urls():
+    titles_and_urls = get_wiki_titles_and_urls()
+    for title, url in islice(titles_and_urls, 100):
+        query = quote(title.lower())
+        yield URL_TEMPLATE.format(query)
+
+
+def run():
+    with open(CURL_FILE, 'wt') as output_file:
+        for url in get_urls():
+            output_file.write(f'url="{url}"\n')
+
+
+if __name__ == '__main__':
+    run()
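
The commit does not show how the generated urls.curl file is consumed. Each line it writes uses curl's config-file syntax (url="..."), so one plausible way to drive the benchmark is to hand the file to curl with -K/--config. The sketch below is illustrative only and not part of this commit: the run_curl function and its timing output are invented, and it assumes curl is installed, make_curl.py has already been run, and the completion service is listening on localhost:8000.

"""
Illustrative driver for the generated curl config -- not part of this commit.
Assumes `curl` is on the PATH and the service is up on localhost:8000.
"""
import subprocess
from datetime import datetime

from make_curl import CURL_FILE


def run_curl():
    # Each url="..." line in CURL_FILE is one request for curl to perform.
    # Response bodies are discarded; only the batch's elapsed time matters.
    start = datetime.now()
    subprocess.run(
        ['curl', '--silent', '--config', CURL_FILE],
        stdout=subprocess.DEVNULL,
        check=True,
    )
    print('Elapsed:', datetime.now() - start)


if __name__ == '__main__':
    run_curl()

Run python make_curl.py first to produce urls.curl, then run the driver against a live server.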

performance.py  +5 -5

@@ -21,15 +21,15 @@ NUM_PAGES = 500
 def query_test():
     titles_and_urls = get_wiki_titles_and_urls()
 
-    # client = TestClient(app)
+    client = TestClient(app)
 
     start = datetime.now()
     hits = 0
     for title, url in islice(titles_and_urls, NUM_PAGES):
-        # result = client.get('/complete', params={'q': title})
-        # assert result.status_code == 200
-        # data = result.content.decode('utf8')
-        data = json.dumps(complete(title))
+        result = client.get('/complete', params={'q': title})
+        assert result.status_code == 200
+        data = result.content.decode('utf8')
+        # data = json.dumps(complete(title))
 
         if url in data:
             hits += 1
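
With the TestClient line re-enabled, query_test now exercises the /complete endpoint through the in-process test client (FastAPI/Starlette's TestClient) rather than calling complete() directly, so the measured time includes request parsing and response serialisation. The rest of query_test lies outside this hunk; the lines below are only a guess at how the start timestamp and hits counter might be summarised, not code taken from the repository.

    # Hypothetical tail of query_test() -- the hunk above ends before it:
    elapsed = (datetime.now() - start).total_seconds()
    print(f'Hit rate: {hits / NUM_PAGES:.2%}, '
          f'queries per second: {NUM_PAGES / elapsed:.1f}')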