Compare commits

...

10 commits
main ... local

Author SHA1 Message Date
Daoud Clarke
9920fc5ddd Disguise URLs so Firefox doesn't recognise them and filter them out 2022-08-13 10:49:55 +01:00
Daoud Clarke
a8bbb9f303 Missing import 2022-08-13 10:14:28 +01:00
Daoud Clarke
6022d867a3 Merge branch 'completion' into local 2022-08-13 10:08:37 +01:00
Daoud Clarke
9b22c32322 Merge branch 'improve-ranking-with-multi-term-search' into local 2022-08-09 22:50:56 +01:00
Daoud Clarke
a40259af30 Search for the term itself as well as its completion 2022-08-01 23:38:14 +01:00
Daoud Clarke
87d2e9474c Merge branch 'improve-ranking-with-multi-term-search' into local 2022-08-01 23:33:52 +01:00
Daoud Clarke
3137068c77 More threads for retrieving batches 2022-07-30 10:09:27 +01:00
Daoud Clarke
e79f1ce10b Delete unused SQL 2022-07-30 09:27:44 +01:00
Daoud Clarke
c52faeaddc Merge branch 'reduce-contention-for-client-queries' into local 2022-07-24 17:02:37 +01:00
Daoud Clarke
a93fbe9d66 Add CORS for local testing 2022-07-10 17:13:27 +01:00
2 changed files with 12 additions and 1 deletion

View file

@@ -7,6 +7,7 @@ from pathlib import Path
import uvicorn import uvicorn
from fastapi import FastAPI from fastapi import FastAPI
from starlette.middleware.cors import CORSMiddleware
from mwmbl import background from mwmbl import background
from mwmbl.crawler import app as crawler from mwmbl.crawler import app as crawler
@@ -65,6 +66,16 @@ def run():
# Initialize FastApi instance # Initialize FastApi instance
app = FastAPI() app = FastAPI()
origins = ["*"]
app.add_middleware(
CORSMiddleware,
allow_origins=origins,
allow_credentials=True,
allow_methods=["*"],
allow_headers=["*"],
)
search_router = search.create_router(ranker) search_router = search.create_router(ranker)
app.include_router(search_router) app.include_router(search_router)

View file

@@ -157,7 +157,7 @@ class Ranker:
def complete(self, q: str): def complete(self, q: str):
ordered_results, terms, completions = self.get_results(q) ordered_results, terms, completions = self.get_results(q)
filtered_completions = [c for c in completions if c != terms[-1]] filtered_completions = [c for c in completions if c != terms[-1]]
urls = [item.url[len(HTTPS_STRING):].rstrip('/') for item in ordered_results[:5] urls = [item.url[len(HTTPS_STRING):].rstrip('/').replace('.', '').replace('/', ' ') for item in ordered_results[:5]
if item.url.startswith(HTTPS_STRING) and all(term in item.url for term in terms)][:1] if item.url.startswith(HTTPS_STRING) and all(term in item.url for term in terms)][:1]
completed = [' '.join(terms[:-1] + [t]) for t in filtered_completions] completed = [' '.join(terms[:-1] + [t]) for t in filtered_completions]
return [q, urls + completed] return [q, urls + completed]