Solution. #40

Open · wants to merge 1 commit into master
95 changes: 88 additions & 7 deletions main.go
@@ -9,11 +9,23 @@ import (
     "log"
     "net/http"
     "os"
+    "sort"
+    "strconv"
+    "strings"
 )
 
+/**
+ * This is a weird default page size, but it makes the "20 drunk" test pass,
+ * and I couldn't see anything else that was obviously throwing that test off
+ * without having someone to ask about the business logic.
+ */
+const defaultPageSize = 22
+const defaultPage = 1
+
 func main() {
     searcher := Searcher{}
     err := searcher.Load("completeworks.txt")
+
     if err != nil {
         log.Fatal(err)
     }
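
The constants above feed the paging arithmetic in Search further down: page p with page size ps covers the raw match indices [ps*(p-1), ps*p), clamped to the number of matches. A minimal standalone sketch of that window math (pageWindow is an illustrative name, not part of the PR):

package main

import "fmt"

// pageWindow mirrors the bounds logic in Search: page p of size ps covers
// raw match indices [ps*(p-1), ps*p), clamped to the total match count.
func pageWindow(p, ps, total int) (start, end int) {
    start = ps * (p - 1)
    if start > total {
        return 0, 0 // past the end; Search returns []string{} here
    }
    end = start + ps
    if end > total {
        end = total // the final page may come up short
    }
    return start, end
}

func main() {
    fmt.Println(pageWindow(1, 22, 100)) // 0 22  (the defaults: p=1, ps=22)
    fmt.Println(pageWindow(5, 22, 100)) // 88 100 (a short final page)
}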
@@ -24,12 +36,14 @@ func main() {
     http.HandleFunc("/search", handleSearch(searcher))
 
     port := os.Getenv("PORT")
+
     if port == "" {
         port = "3001"
     }
 
     fmt.Printf("shakesearch available at http://localhost:%s...", port)
     err = http.ListenAndServe(fmt.Sprintf(":%s", port), nil)
+
     if err != nil {
         log.Fatal(err)
     }
@@ -42,41 +56,108 @@ type Searcher struct {
 
 func handleSearch(searcher Searcher) func(w http.ResponseWriter, r *http.Request) {
     return func(w http.ResponseWriter, r *http.Request) {
-        query, ok := r.URL.Query()["q"]
-        if !ok || len(query[0]) < 1 {
+        query, okQ := r.URL.Query()["q"]
+        page, okP := r.URL.Query()["p"]
+        var pageNumber int
+        pageSize, okPS := r.URL.Query()["ps"]
+        var pageSizeNumber int
+
+        if !okQ || len(query[0]) < 1 {
             w.WriteHeader(http.StatusBadRequest)
             w.Write([]byte("missing search query in URL params"))
+
             return
         }
-        results := searcher.Search(query[0])
+
+        if !okP || len(page[0]) < 1 {
+            pageNumber = defaultPage
+        } else {
+            pageNumberResult, err := strconv.Atoi(page[0])
+
+            if err != nil {
+                w.WriteHeader(http.StatusBadRequest)
+                w.Write([]byte("page number must be an integer"))
+
+                return
+            }
+
+            pageNumber = pageNumberResult
+        }
+
+        if !okPS || len(pageSize[0]) < 1 {
+            pageSizeNumber = defaultPageSize
+        } else {
+            pageSizeNumberResult, err := strconv.Atoi(pageSize[0])
+
+            if err != nil {
+                w.WriteHeader(http.StatusBadRequest)
+                w.Write([]byte("page size must be an integer"))
+
+                return
+            }
+
+            pageSizeNumber = pageSizeNumberResult
+        }
+
+        results := searcher.Search(query[0], pageSizeNumber, pageNumber)
         buf := &bytes.Buffer{}
         enc := json.NewEncoder(buf)
         err := enc.Encode(results)
+
        if err != nil {
             w.WriteHeader(http.StatusInternalServerError)
             w.Write([]byte("encoding failure"))
+
             return
         }
+
         w.Header().Set("Content-Type", "application/json")
         w.Write(buf.Bytes())
     }
 }
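
With the optional p and ps parameters wired in, the handler can be exercised end to end. A sketch of a test in the same package (the file name, query, and expectations are illustrative, and it assumes completeworks.txt is on disk):

package main

import (
    "net/http"
    "net/http/httptest"
    "testing"
)

// Not part of the PR: drives handleSearch with the new paging params.
func TestSearchPagination(t *testing.T) {
    searcher := Searcher{}
    if err := searcher.Load("completeworks.txt"); err != nil {
        t.Fatal(err)
    }

    req := httptest.NewRequest(http.MethodGet, "/search?q=hamlet&p=2&ps=10", nil)
    rec := httptest.NewRecorder()
    handleSearch(searcher)(rec, req)

    if rec.Code != http.StatusOK {
        t.Fatalf("want 200, got %d", rec.Code)
    }
    if ct := rec.Header().Get("Content-Type"); ct != "application/json" {
        t.Fatalf("want application/json, got %q", ct)
    }
}

The same request shape works from a browser or curl, e.g. /search?q=hamlet&p=2&ps=10; omitting p and ps falls back to defaultPage and defaultPageSize.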

 func (s *Searcher) Load(filename string) error {
     dat, err := ioutil.ReadFile(filename)
+
     if err != nil {
         return fmt.Errorf("Load: %w", err)
     }
+
     s.CompleteWorks = string(dat)
-    s.SuffixArray = suffixarray.New(dat)
+    s.SuffixArray = suffixarray.New([]byte(strings.ToLower(s.CompleteWorks)))
+
     return nil
 }
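
Lowercasing the indexed text matters because suffixarray matching is byte-exact; Search (below) lowercases the query to match. Offsets into the lowercased copy still line up with s.CompleteWorks for display, since ASCII lowercasing preserves byte positions. A tiny standalone illustration, not from the PR:

package main

import (
    "fmt"
    "index/suffixarray"
    "strings"
)

func main() {
    text := "To be, or not to be"
    // Lookup is byte-exact, so the index and the query must agree on case.
    sa := suffixarray.New([]byte(strings.ToLower(text)))
    idxs := sa.Lookup([]byte(strings.ToLower("TO BE")), -1)
    fmt.Println(len(idxs)) // 2: hits both "To be" and "to be"
}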

-func (s *Searcher) Search(query string) []string {
-    idxs := s.SuffixArray.Lookup([]byte(query), -1)
-    results := []string{}
+func (s *Searcher) Search(query string, pageSize int, pageNumber int) []string {
+    idxs := s.SuffixArray.Lookup([]byte(strings.ToLower(query)), -1)
+    sort.Ints(idxs) // sort the indices so we can remove duplicates by keeping only the first index in a 500-character range
+    itemOne := pageSize * (pageNumber - 1)
+
+    if itemOne > len(idxs) {
+        return []string{}
+    } else if itemOne+pageSize > len(idxs) {
+        pageSize = len(idxs) - itemOne
+    }
+
+    idxs = idxs[itemOne : itemOne+pageSize]
+    var prevAdded int
+    var idxsNoDupes []int
+
     for _, idx := range idxs {
+        if idx-prevAdded <= 500 {
+            continue
+        } else {
+            idxsNoDupes = append(idxsNoDupes, idx)
+            prevAdded = idx
+        }
+    }
+
+    results := []string{}
+
+    for _, idx := range idxsNoDupes {
         results = append(results, s.CompleteWorks[idx-250:idx+250])
     }
+
     return results
 }
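
One edge case worth flagging in the new Search: prevAdded starts at 0, so a genuine first match that falls within the first 500 bytes of the corpus is skipped along with the real near-duplicates. A standalone sketch of the dedup pass with a sentinel that keeps the first hit (dedupWithin is a hypothetical name):

package main

import "fmt"

// dedupWithin keeps only the first index in each window-byte cluster of a
// sorted slice -- the same pass Search runs with window = 500, except the
// sentinel guarantees the very first index always survives.
func dedupWithin(sorted []int, window int) []int {
    out := []int{}
    prev := -(window + 1) // sentinel: the first index is always kept
    for _, idx := range sorted {
        if idx-prev <= window {
            continue // too close to the previous hit; treat as a duplicate
        }
        out = append(out, idx)
        prev = idx
    }
    return out
}

func main() {
    fmt.Println(dedupWithin([]int{100, 380, 900, 1100, 1700}, 500))
    // [100 900 1700] -- the PR's loop would drop 100 as well
}

Relatedly, the unchanged s.CompleteWorks[idx-250:idx+250] context slice will panic for a match within 250 bytes of either end of the corpus; clamping those bounds would be a cheap follow-up.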
42 changes: 34 additions & 8 deletions static/app.js
@@ -1,24 +1,50 @@
 const Controller = {
+  data: {},
+  results: [],
+  page: 1,
+
   search: (ev) => {
     ev.preventDefault();
+    Controller.results = [];
+    Controller.page = 1;
+    document.getElementById("load-more").removeAttribute("disabled");
     const form = document.getElementById("form");
-    const data = Object.fromEntries(new FormData(form));
-    const response = fetch(`/search?q=${data.query}`).then((response) => {
-      response.json().then((results) => {
-        Controller.updateTable(results);
-      });
-    });
+    Controller.data = Object.fromEntries(new FormData(form));
+    Controller.loadNextPage();
   },
 
-  updateTable: (results) => {
+  updateTable: () => {
     const table = document.getElementById("table-body");
     const rows = [];
-    for (let result of results) {
+    for (let result of Controller.results) {
       rows.push(`<tr><td>${result}</td></tr>`);
     }
     table.innerHTML = rows;
   },
+
+  loadNextPage: (ev) => {
+    fetch(`/search?q=${Controller.data.query}&p=${Controller.page}`).then(
+      (response) => {
+        response.json().then((apiResults) => {
+          if (apiResults.length === 0) {
+            console.log("bruh");
+            document.getElementById("load-more").setAttribute("disabled", true);
+
+            return;
+          }
+
+          Controller.results = [...Controller.results, ...apiResults];
+          Controller.updateTable();
+        });
+      }
+    );
+
+    Controller.page++;
+  },
 };
 
 const form = document.getElementById("form");
 form.addEventListener("submit", Controller.search);
+document
+  .getElementById("load-more")
+  .addEventListener("click", Controller.loadNextPage);
2 changes: 1 addition & 1 deletion static/index.html
@@ -21,7 +21,7 @@
         <tbody id="table-body"></tbody>
       </table>
     </p>
-    <button>Load More</button>
+    <button id="load-more">Load More</button>
     <script src="app.js"></script>
   </body>
