package serve

import (
	"encoding/json"
	"fmt"
	"io"
	"net/http"
	nurl "net/url"
	"os"
	fp "path/filepath"
	"strings"
	"sync"
	"time"

	"github.com/RadhiFadlillah/shiori/model"
	"github.com/RadhiFadlillah/shiori/readability"
	valid "github.com/asaskevich/govalidator"
	jwt "github.com/dgrijalva/jwt-go"
	"github.com/julienschmidt/httprouter"
	"golang.org/x/crypto/bcrypt"
)

// apiLogin is handler for POST /api/login
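// The request body is decoded into model.LoginRequest. A sketch of the
// payload, assuming lowercase JSON tags on its Username, Password and
// Remember fields (the exact tags live in the model package):
//
//	{"username": "shiori", "password": "swordfish", "remember": true}
//
// On success, the signed JWT is written back as plain text.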
func (h *webHandler) apiLogin(w http.ResponseWriter, r *http.Request, ps httprouter.Params) {
	// Decode request
	var request model.LoginRequest
	err := json.NewDecoder(r.Body).Decode(&request)
	checkError(err)

	// Get account data from database
	account, err := h.db.GetAccount(request.Username)
	checkError(err)

	// Compare submitted password with the stored bcrypt hash
	err = bcrypt.CompareHashAndPassword([]byte(account.Password), []byte(request.Password))
	if err != nil {
		panic(fmt.Errorf("Username and password don't match"))
	}

	// Calculate expiration time: 12 hours by default, 7 days when "remember me" is requested
	nbf := time.Now()
	exp := time.Now().Add(12 * time.Hour)
	if request.Remember {
		exp = time.Now().Add(7 * 24 * time.Hour)
	}

	// Create token with the registered JWT claims: nbf (not valid before),
	// exp (expiration time) and sub (the account ID as subject)
	token := jwt.NewWithClaims(jwt.SigningMethodHS256, jwt.MapClaims{
		"nbf": nbf.Unix(),
		"exp": exp.Unix(),
		"sub": account.ID,
	})

	tokenString, err := token.SignedString(h.jwtKey)
	checkError(err)

	// Return token
	fmt.Fprint(w, tokenString)
}

// apiGetBookmarks is handler for GET /api/bookmarks
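// Results can be filtered with the "keyword" and "tags" query strings,
// where tags are comma separated, e.g. (hypothetical values):
//
//	GET /api/bookmarks?keyword=golang&tags=programming,web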
func (h *webHandler) apiGetBookmarks(w http.ResponseWriter, r *http.Request, ps httprouter.Params) {
	// Check token
	err := h.checkAPIToken(r)
	checkError(err)

	// Get URL queries
	keyword := r.URL.Query().Get("keyword")
	strTags := r.URL.Query().Get("tags")
	tags := strings.Split(strTags, ",")
	if len(tags) == 1 && tags[0] == "" {
		tags = []string{}
	}

	// Fetch all matching bookmarks
	bookmarks, err := h.db.SearchBookmarks(true, keyword, tags...)
	checkError(err)

	err = json.NewEncoder(w).Encode(&bookmarks)
	checkError(err)
}

// apiGetTags is handler for GET /api/tags
func (h *webHandler) apiGetTags(w http.ResponseWriter, r *http.Request, ps httprouter.Params) {
	// Check token
	err := h.checkAPIToken(r)
	checkError(err)

	// Fetch all tags
	tags, err := h.db.GetTags()
	checkError(err)

	err = json.NewEncoder(w).Encode(&tags)
	checkError(err)
}

// apiInsertBookmark is handler for POST /api/bookmark
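// The request body is a model.Bookmark encoded as JSON. Only the URL is
// validated; a missing title or excerpt is filled in from the fetched page.
// A sketch of the payload, assuming lowercase JSON tags on the model:
//
//	{"url": "https://example.com/article", "title": "", "tags": [{"name": "example"}]}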
func (h *webHandler) apiInsertBookmark(w http.ResponseWriter, r *http.Request, ps httprouter.Params) {
	// Enable CORS for this endpoint
	w.Header().Set("Access-Control-Allow-Origin", "*")
	w.Header().Set("Access-Control-Allow-Methods", "POST")
	w.Header().Set("Access-Control-Allow-Headers", "Content-Type")

	// Check token
	err := h.checkAPIToken(r)
	checkError(err)

	// Decode request
	book := model.Bookmark{}
	err = json.NewDecoder(r.Body).Decode(&book)
	checkError(err)

	// Make sure URL is valid
	parsedURL, err := nurl.Parse(book.URL)
	if err != nil || !valid.IsRequestURL(book.URL) {
		panic(fmt.Errorf("URL is not valid"))
	}

	// Clear fragment and UTM parameters from URL
	parsedURL.Fragment = ""
	clearUTMParams(parsedURL)
	book.URL = parsedURL.String()

	// Get new bookmark ID
	book.ID, err = h.db.GetNewID("bookmark")
	checkError(err)

	// Fetch data from internet
	article, _ := readability.FromURL(parsedURL, 20*time.Second)

	book.Author = article.Meta.Author
	book.MinReadTime = article.Meta.MinReadTime
	book.MaxReadTime = article.Meta.MaxReadTime
	book.Content = article.Content
	book.HTML = article.RawContent

	// If title and excerpt don't have a submitted value, use the article's
	if book.Title == "" {
		book.Title = article.Meta.Title
	}

	if book.Excerpt == "" {
		book.Excerpt = article.Meta.Excerpt
	}

	// Make sure title is not empty
	if book.Title == "" {
		book.Title = book.URL
	}

	// Check if book has content
	if book.Content != "" {
		book.HasContent = true
	}

	// Save bookmark image to local disk
	imgPath := fp.Join(h.dataDir, "thumb", fmt.Sprintf("%d", book.ID))
	err = downloadFile(article.Meta.Image, imgPath, 20*time.Second)
	if err == nil {
		book.ImageURL = fmt.Sprintf("/thumb/%d", book.ID)
	}

	// Save bookmark to database
	_, err = h.db.InsertBookmark(book)
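	// Insert can fail, most likely because the URL is already bookmarked;
	// in that case reuse the existing record's ID and update it instead.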
	if err != nil {
		fmt.Println(err)
		book.ID = h.db.GetBookmarkID(book.URL)
		book.Modified = time.Now().UTC().Format("2006-01-02 15:04:05")
		fmt.Println(book.ID, book.Modified)
		_, err = h.db.UpdateBookmarks(book)
		checkError(err)
	}

	// Return new saved result
	err = json.NewEncoder(w).Encode(&book)
	checkError(err)
}

// apiDeleteBookmark is handler for DELETE /api/bookmark
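// The request body is a plain JSON array of bookmark IDs to delete,
// e.g. [1, 2, 3] (hypothetical IDs).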
func (h *webHandler) apiDeleteBookmark(w http.ResponseWriter, r *http.Request, ps httprouter.Params) {
	// Check token
	err := h.checkAPIToken(r)
	checkError(err)

	// Decode request
	ids := []int{}
	err = json.NewDecoder(r.Body).Decode(&ids)
	checkError(err)

	// Delete bookmarks
	err = h.db.DeleteBookmarks(ids...)
	checkError(err)

	// Delete thumbnail images from local disk
	for _, id := range ids {
		imgPath := fp.Join(h.dataDir, "thumb", fmt.Sprintf("%d", id))
		os.Remove(imgPath)
	}

	fmt.Fprint(w, 1)
}

// apiUpdateBookmark is handler for PUT /api/bookmarks
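// The request body is the bookmark to update, encoded as JSON; only its ID,
// title, excerpt and tags are used. A sketch of the payload, assuming
// lowercase JSON tags on model.Bookmark:
//
//	{"id": 1, "title": "New title", "excerpt": "Short summary", "tags": [{"name": "golang"}]}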
func (h *webHandler) apiUpdateBookmark(w http.ResponseWriter, r *http.Request, ps httprouter.Params) {
	// Check token
	err := h.checkAPIToken(r)
	checkError(err)

	// Decode request
	request := model.Bookmark{}
	err = json.NewDecoder(r.Body).Decode(&request)
	checkError(err)

	// Validate input
	if request.Title == "" {
		panic(fmt.Errorf("Title must not be empty"))
	}

	// Get existing bookmark from database
	bookmarks, err := h.db.GetBookmarks(true, request.ID)
	checkError(err)
	if len(bookmarks) == 0 {
		panic(fmt.Errorf("No bookmark with matching index"))
	}

	// Set new bookmark data
	book := bookmarks[0]
	book.Title = request.Title
	book.Excerpt = request.Excerpt

	// Set new tags: mark every old tag as deleted first, then revive
	// the ones that also appear in the request
	for i := range book.Tags {
		book.Tags[i].Deleted = true
	}

	for _, newTag := range request.Tags {
		for i, oldTag := range book.Tags {
			if newTag.Name == oldTag.Name {
				newTag.ID = oldTag.ID
				book.Tags[i].Deleted = false
				break
			}
		}

		if newTag.ID == 0 {
			book.Tags = append(book.Tags, newTag)
		}
	}

	// Update database
	res, err := h.db.UpdateBookmarks(book)
	checkError(err)

	// Return new saved result
	err = json.NewEncoder(w).Encode(&res[0])
	checkError(err)
}

// apiUpdateBookmarkTags is handler for PUT /api/bookmarks/tags
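// The request body carries the target bookmark IDs and the tags to attach,
// matching the anonymous struct decoded below. A sketch of the payload
// (the "name" field of a tag is an assumption about model.Tag's JSON tag):
//
//	{"ids": [1, 2], "tags": [{"name": "golang"}]}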
func (h *webHandler) apiUpdateBookmarkTags(w http.ResponseWriter, r *http.Request, ps httprouter.Params) {
	// Check token
	err := h.checkAPIToken(r)
	checkError(err)

	// Decode request
	request := struct {
		IDs  []int       `json:"ids"`
		Tags []model.Tag `json:"tags"`
	}{}

	err = json.NewDecoder(r.Body).Decode(&request)
	checkError(err)

	// Validate input
	if len(request.IDs) == 0 || len(request.Tags) == 0 {
		panic(fmt.Errorf("IDs and tags must not be empty"))
	}

	// Get existing bookmarks from database
	bookmarks, err := h.db.GetBookmarks(true, request.IDs...)
	checkError(err)
	if len(bookmarks) == 0 {
		panic(fmt.Errorf("No bookmark with matching index"))
	}

	// Set new tags
	for i, book := range bookmarks {
		for _, newTag := range request.Tags {
			for _, oldTag := range book.Tags {
				if newTag.Name == oldTag.Name {
					newTag.ID = oldTag.ID
					break
				}
			}

			if newTag.ID == 0 {
				book.Tags = append(book.Tags, newTag)
			}
		}

		bookmarks[i] = book
	}

	// Update database
	res, err := h.db.UpdateBookmarks(bookmarks...)
	checkError(err)

	// Return new saved result
	err = json.NewEncoder(w).Encode(&res)
	checkError(err)
}

// apiUpdateCache is handler for PUT /api/cache
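// The request body is a plain JSON array of bookmark IDs whose cached
// content should be refreshed, e.g. [1, 2, 3] (hypothetical IDs). Every
// bookmark is re-fetched concurrently in its own goroutine.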
func (h *webHandler) apiUpdateCache(w http.ResponseWriter, r *http.Request, ps httprouter.Params) {
	// Check token
	err := h.checkAPIToken(r)
	checkError(err)

	// Decode request
	ids := []int{}
	err = json.NewDecoder(r.Body).Decode(&ids)
	checkError(err)

	// Prepare wait group and mutex
	mx := sync.Mutex{}
	wg := sync.WaitGroup{}

	// Fetch bookmarks from database
	books, err := h.db.GetBookmarks(false, ids...)
	checkError(err)

	// Download new cache data
	for i, book := range books {
		wg.Add(1)

		go func(pos int, book model.Bookmark) {
			// Make sure to stop wait group
			defer wg.Done()

			// Parse URL
			parsedURL, err := nurl.Parse(book.URL)
			if err != nil || !valid.IsRequestURL(book.URL) {
				return
			}

			// Fetch data from internet
			article, err := readability.FromURL(parsedURL, 20*time.Second)
			if err != nil {
				return
			}

			book.Excerpt = article.Meta.Excerpt
			book.Author = article.Meta.Author
			book.MinReadTime = article.Meta.MinReadTime
			book.MaxReadTime = article.Meta.MaxReadTime
			book.Content = article.Content
			book.HTML = article.RawContent

			// Make sure title is not empty
			if article.Meta.Title != "" {
				book.Title = article.Meta.Title
			}

			// Check if book has content
			if book.Content != "" {
				book.HasContent = true
			}

			// Update bookmark image in local disk
			imgPath := fp.Join(h.dataDir, "thumb", fmt.Sprintf("%d", book.ID))
			err = downloadFile(article.Meta.Image, imgPath, 20*time.Second)
			if err == nil {
				book.ImageURL = fmt.Sprintf("/thumb/%d", book.ID)
			}

			// Update the shared list of bookmarks, guarded by the mutex
			mx.Lock()
			books[pos] = book
			mx.Unlock()
		}(i, book)
	}

	// Wait until all finished
	wg.Wait()

	// Update database
	res, err := h.db.UpdateBookmarks(books...)
	checkError(err)

	// Return new saved result
	err = json.NewEncoder(w).Encode(&res)
	checkError(err)
}
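
// downloadFile fetches url with the given timeout and writes the response
// body to dstPath, creating the destination directory when needed. It is
// used here for saving bookmark thumbnails to local disk.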
func downloadFile(url, dstPath string, timeout time.Duration) error {
	// Fetch data from URL
	client := &http.Client{Timeout: timeout}
	resp, err := client.Get(url)
	if err != nil {
		return err
	}
	defer resp.Body.Close()

	// Make sure destination directory exists
	err = os.MkdirAll(fp.Dir(dstPath), os.ModePerm)
	if err != nil {
		return err
	}

	// Create destination file
	dst, err := os.Create(dstPath)
	if err != nil {
		return err
	}
	defer dst.Close()

	// Write response body to the file
	_, err = io.Copy(dst, resp.Body)
	if err != nil {
		return err
	}

	return nil
}
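
// clearUTMParams strips every query parameter whose name starts with
// "utm_" (e.g. utm_source, utm_medium) from url, leaving the rest of the
// query string intact.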
func clearUTMParams(url *nurl.URL) {
	newQuery := nurl.Values{}
	for key, value := range url.Query() {
		if !strings.HasPrefix(key, "utm_") {
			newQuery[key] = value
		}
	}

	url.RawQuery = newQuery.Encode()
}