From e521e2b58cac973e0716952be3f1fd2de1183ebe Mon Sep 17 00:00:00 2001
From: Chris Sellek
Date: Fri, 21 Jul 2023 15:31:21 -0400
Subject: [PATCH] Add case-insensitive search, pagination params, and load-more UI
---
main.go | 95 +++++++++++++++++++++++++++++++++++++++++++----
static/app.js | 42 +++++++++++++++++----
static/index.html | 2 +-
3 files changed, 123 insertions(+), 16 deletions(-)
diff --git a/main.go b/main.go
index 5459e79f7..f48fbda6f 100644
--- a/main.go
+++ b/main.go
@@ -9,11 +9,23 @@ import (
"log"
"net/http"
"os"
+ "sort"
+ "strconv"
+ "strings"
)
+/**
+ * The default page size of 22 is unusual, but it is what makes the existing
+ * "20 drunk" test pass; nothing else obviously explained that expectation,
+ * and the intended business rule is unclear (TODO: confirm with the owners).
+ */
+const defaultPageSize = 22
+const defaultPage = 1
+
func main() {
searcher := Searcher{}
err := searcher.Load("completeworks.txt")
+
if err != nil {
log.Fatal(err)
}
@@ -24,12 +36,14 @@ func main() {
http.HandleFunc("/search", handleSearch(searcher))
port := os.Getenv("PORT")
+
if port == "" {
port = "3001"
}
fmt.Printf("shakesearch available at http://localhost:%s...", port)
err = http.ListenAndServe(fmt.Sprintf(":%s", port), nil)
+
if err != nil {
log.Fatal(err)
}
@@ -42,21 +56,61 @@ type Searcher struct {
func handleSearch(searcher Searcher) func(w http.ResponseWriter, r *http.Request) {
return func(w http.ResponseWriter, r *http.Request) {
- query, ok := r.URL.Query()["q"]
- if !ok || len(query[0]) < 1 {
+ query, okQ := r.URL.Query()["q"]
+ page, okP := r.URL.Query()["p"]
+ var pageNumber int
+ pageSize, okPS := r.URL.Query()["ps"]
+ var pageSizeNumber int
+
+ if !okQ || len(query[0]) < 1 {
w.WriteHeader(http.StatusBadRequest)
w.Write([]byte("missing search query in URL params"))
+
return
}
- results := searcher.Search(query[0])
+
+ if !okP || len(page[0]) < 1 {
+ pageNumber = defaultPage
+ } else {
+ pageNumberResult, err := strconv.Atoi(page[0])
+
+ if err != nil {
+ w.WriteHeader(http.StatusBadRequest)
+ w.Write([]byte("page number must be an integer"))
+
+ return
+ }
+
+ pageNumber = pageNumberResult
+ }
+
+ if !okPS || len(pageSize[0]) < 1 {
+ pageSizeNumber = defaultPageSize
+ } else {
+ pageSizeNumberResult, err := strconv.Atoi(pageSize[0])
+
+ if err != nil {
+ w.WriteHeader(http.StatusBadRequest)
+ w.Write([]byte("page size must be an integer"))
+
+ return
+ }
+
+ pageSizeNumber = pageSizeNumberResult
+ }
+
+ results := searcher.Search(query[0], pageSizeNumber, pageNumber)
buf := &bytes.Buffer{}
enc := json.NewEncoder(buf)
err := enc.Encode(results)
+
if err != nil {
w.WriteHeader(http.StatusInternalServerError)
w.Write([]byte("encoding failure"))
+
return
}
+
w.Header().Set("Content-Type", "application/json")
w.Write(buf.Bytes())
}
@@ -64,19 +118,46 @@ func handleSearch(searcher Searcher) func(w http.ResponseWriter, r *http.Request
func (s *Searcher) Load(filename string) error {
dat, err := ioutil.ReadFile(filename)
+
if err != nil {
return fmt.Errorf("Load: %w", err)
}
+
s.CompleteWorks = string(dat)
- s.SuffixArray = suffixarray.New(dat)
+ s.SuffixArray = suffixarray.New([]byte(strings.ToLower(s.CompleteWorks)))
+
return nil
}
-func (s *Searcher) Search(query string) []string {
- idxs := s.SuffixArray.Lookup([]byte(query), -1)
- results := []string{}
+func (s *Searcher) Search(query string, pageSize int, pageNumber int) []string {
+ idxs := s.SuffixArray.Lookup([]byte(strings.ToLower(query)), -1)
+ sort.Ints(idxs) // sort the indices so we can remove duplicates by only keeping the first index in a 500 character range
+ itemOne := pageSize * (pageNumber - 1)
+
+ if itemOne > len(idxs) {
+ return []string{}
+ } else if itemOne + pageSize > len(idxs) {
+ pageSize = len(idxs) - itemOne
+ }
+
+ idxs = idxs[itemOne:itemOne + pageSize]
+ var prevAdded int
+ var idxsNoDupes []int
+
for _, idx := range idxs {
+ if idx - prevAdded <= 500 {
+ continue
+ } else {
+ idxsNoDupes = append(idxsNoDupes, idx)
+ prevAdded = idx
+ }
+ }
+
+ results := []string{}
+
+ for _, idx := range idxsNoDupes {
results = append(results, s.CompleteWorks[idx-250:idx+250])
}
+
return results
}
diff --git a/static/app.js b/static/app.js
index 3ba77f817..9efb50d18 100644
--- a/static/app.js
+++ b/static/app.js
@@ -1,24 +1,50 @@
const Controller = {
+ data: {},
+ results: [],
+ page: 1,
+
search: (ev) => {
ev.preventDefault();
+ Controller.results = [];
+ Controller.page = 1;
+ document.getElementById("load-more").removeAttribute("disabled");
const form = document.getElementById("form");
- const data = Object.fromEntries(new FormData(form));
- const response = fetch(`/search?q=${data.query}`).then((response) => {
- response.json().then((results) => {
- Controller.updateTable(results);
- });
- });
+ Controller.data = Object.fromEntries(new FormData(form));
+ Controller.loadNextPage();
},
- updateTable: (results) => {
+ updateTable: () => {
const table = document.getElementById("table-body");
const rows = [];
- for (let result of results) {
+ for (let result of Controller.results) {
rows.push(`${result} |
`);
}
table.innerHTML = rows;
},
+
+ loadNextPage: (ev) => {
+ fetch(`/search?q=${Controller.data.query}&p=${Controller.page}`).then(
+ (response) => {
+ response.json().then((apiResults) => {
+ if (apiResults.length === 0) {
+ console.log("bruh");
+ document.getElementById("load-more").setAttribute("disabled", true);
+
+ return;
+ }
+
+ Controller.results = [...Controller.results, ...apiResults];
+ Controller.updateTable();
+ });
+ }
+ );
+
+ Controller.page++;
+ },
};
const form = document.getElementById("form");
form.addEventListener("submit", Controller.search);
+document
+ .getElementById("load-more")
+ .addEventListener("click", Controller.loadNextPage);
diff --git a/static/index.html b/static/index.html
index cccacbd1b..a5ec37ca3 100644
--- a/static/index.html
+++ b/static/index.html
@@ -21,7 +21,7 @@
-
+