diff --git a/restapi/restapi.go b/restapi/restapi.go
index a3edf42..07090a3 100644
--- a/restapi/restapi.go
+++ b/restapi/restapi.go
@@ -13,24 +13,77 @@ import (
 )
 
 var trendingexport string
+var trendingHashTags string
+var trendingwords string
+
+var hashtagtotal map[string]interface{}
+var wordstotal map[string]interface{}
+//hashtagmeta := make(map[string]interface{});
 
 func enableCors(w *http.ResponseWriter) {
 	(*w).Header().Set("Access-Control-Allow-Origin", "*")
 }
 
-func gettrends() {
-	var err error
-	var rows pgx.Rows
+func getTrendingHashtags() {
+	sql := "SELECT UNNEST(activities.hashtags) as hashtags, count(actors.id) " +
+		"FROM activities " +
+		"LEFT JOIN actors ON activities.document->>'attributedTo'=actors.document->>'id' " +
+		"WHERE actors.bot=TRUE " +
+		//"AND activities.identifiedat > LOCALTIMESTAMP - INTERVAL '30 MINUTES' " +
+		"GROUP BY hashtags ORDER BY count DESC LIMIT 20"
 
 	for {
-		rows, err = pool.Query(context.Background(), "SELECT word, ndoc FROM ts_stat($$ SELECT normalized_tsvector FROM activities WHERE activities.identifiedat > current_timestamp - interval '60 minutes' $$) ORDER BY ndoc DESC LIMIT 10")
+		rows, err := pool.Query(context.Background(), sql)
+		if err != nil {
+			panic(err)
+		}
+
+		hashtagitems := make([]interface{}, 0)
+		hashcount := 0
+		for rows.Next() {
+			var hashtag string
+			var count int
+
+			err = rows.Scan(&hashtag, &count)
+			if err != nil {
+				panic(err)
+			}
+
+			hashtagitem := make(map[string]interface{})
+			hashtagitem["hashtag"] = hashtag
+			hashtagitem["count"] = count
+			hashtagitems = append(hashtagitems, hashtagitem)
+			hashcount = hashcount + 1
+		}
+		rows.Close()
+
+		hashtagtotal = make(map[string]interface{})
+		hashtagtotal["count"] = hashcount
+		hashtagtotal["datetime"] = time.Now().UTC()
+		hashtagtotal["items"] = hashtagitems
+
+		totalJson := make(map[string]interface{})
+		totalJson["hashtags"] = hashtagtotal
+
+		data, err := json.Marshal(totalJson)
+		if err != nil {
+			log.Fatalf("error marshaling combined activity: %v\n", err)
+		}
+
+		trendingHashTags = string(data)
+		time.Sleep(time.Minute * 30)
+
+	}
+}
+
+func gettrends() {
+	for {
+		rows, err := pool.Query(context.Background(), "SELECT word, ndoc FROM ts_stat($$ SELECT normalized_tsvector FROM activities WHERE activities.identifiedat > current_timestamp - interval '60 minutes' $$) ORDER BY ndoc DESC LIMIT 10")
 		if err != nil {
 			panic(err)
 		}
-		defer rows.Close()
 
 		trenditems := make([]interface{}, 0)
-		//fmt.Println(trenditems)
 
 		for rows.Next() {
 			var word string
@@ -46,6 +99,7 @@ func gettrends() {
 			trenditem["word"] = word
 			trenditems = append(trenditems, trenditem)
 		}
+		rows.Close()
 
 		totalJson := make(map[string]interface{})
 		totalJson["trends"] = trenditems
@@ -61,12 +115,22 @@
 	}
 }
 
-func trending(w http.ResponseWriter, r *http.Request) {
+func gettrending(w http.ResponseWriter, r *http.Request) {
+	enableCors(&w)
+//	fmt.Fprintf(w, "%s", trendingexport)
+}
+
+func gettrendinghashtags(w http.ResponseWriter, r *http.Request) {
+	enableCors(&w)
+	fmt.Fprintf(w, "%s", trendingHashTags)
+}
+
+func gettrendingwords(w http.ResponseWriter, r *http.Request) {
 	enableCors(&w)
 	fmt.Fprintf(w, "%s", trendingexport)
 }
 
-func search(w http.ResponseWriter, r *http.Request) {
+func getsearch(w http.ResponseWriter, r *http.Request) {
 	enableCors(&w)
 	searchkeys, exists_search := r.URL.Query()["s"]
 	offsetkeys, exists_offset := r.URL.Query()["o"]
@@ -149,9 +213,12 @@ func main() {
 	pool = getDbPool()
 
 	go gettrends()
+	go getTrendingHashtags()
 
-	http.HandleFunc("/api/v1/search", search)
-	http.HandleFunc("/api/v1/trending", trending)
+	http.HandleFunc("/api/v1/search", getsearch)
+	http.HandleFunc("/api/v1/trending", gettrending)
+	http.HandleFunc("/api/v1/trending/hashtags", gettrendinghashtags)
+	http.HandleFunc("/api/v1/trending/words", gettrendingwords)
 	log.Print("Starting HTTP inbox on port http://0.0.0.0:6431")
 	log.Fatal(http.ListenAndServe("0.0.0.0:6431", nil))
 }
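
For reference, a minimal client sketch against the new /api/v1/trending/hashtags endpoint registered above. It assumes the service from this patch is running locally on port 6431 (as bound in main); the JSON field names mirror the map keys built in getTrendingHashtags ("hashtags", "count", "datetime", "items", "hashtag"), while the Go type names below are purely illustrative and not part of the patch.

// trending_hashtags_client.go - illustrative consumer of the new endpoint.
package main

import (
	"encoding/json"
	"fmt"
	"log"
	"net/http"
	"time"
)

// These types mirror the JSON assembled in getTrendingHashtags; the names are hypothetical.
type hashtagItem struct {
	Hashtag string `json:"hashtag"`
	Count   int    `json:"count"`
}

type hashtagBlock struct {
	Count    int           `json:"count"`
	Datetime time.Time     `json:"datetime"`
	Items    []hashtagItem `json:"items"`
}

type trendingHashtagsResponse struct {
	Hashtags hashtagBlock `json:"hashtags"`
}

func main() {
	// Fetch the cached trending-hashtags document served by gettrendinghashtags.
	resp, err := http.Get("http://localhost:6431/api/v1/trending/hashtags")
	if err != nil {
		log.Fatalf("request failed: %v", err)
	}
	defer resp.Body.Close()

	var trending trendingHashtagsResponse
	if err := json.NewDecoder(resp.Body).Decode(&trending); err != nil {
		log.Fatalf("decoding response failed: %v", err)
	}

	// Print each trending hashtag with its activity count.
	for _, item := range trending.Hashtags.Items {
		fmt.Printf("%s: %d\n", item.Hashtag, item.Count)
	}
}

Because getTrendingHashtags refreshes the trendingHashTags string only every 30 minutes, the handler serves that cached JSON directly, so a client sees results that may be up to 30 minutes old.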