Files
torrent-indexer/api/comando_torrents.go

470 lines
12 KiB
Go
Raw Normal View History

package handler
import (
"bytes"
"context"
"encoding/json"
"fmt"
"io"
"net/http"
"net/url"
"regexp"
"slices"
2023-09-22 21:13:57 -03:00
"strings"
2023-09-22 22:12:38 -03:00
"time"
"github.com/PuerkitoBio/goquery"
"github.com/felipemarinho97/torrent-indexer/magnet"
"github.com/felipemarinho97/torrent-indexer/schema"
goscrape "github.com/felipemarinho97/torrent-indexer/scrape"
"github.com/felipemarinho97/torrent-indexer/utils"
"github.com/hbollon/go-edlib"
)
2023-10-01 20:27:44 +00:00
var comando = IndexerMeta{
URL: "https://comando.la/",
SearchURL: "?s=",
}
2023-09-22 22:12:38 -03:00
var replacer = strings.NewReplacer(
"janeiro", "01",
"fevereiro", "02",
"março", "03",
"abril", "04",
"maio", "05",
"junho", "06",
"julho", "07",
"agosto", "08",
"setembro", "09",
"outubro", "10",
"novembro", "11",
"dezembro", "12",
)
func (i *Indexer) HandlerComandoIndexer(w http.ResponseWriter, r *http.Request) {
start := time.Now()
defer func() {
i.metrics.IndexerDuration.WithLabelValues("comando").Observe(time.Since(start).Seconds())
i.metrics.IndexerRequests.WithLabelValues("comando").Inc()
}()
ctx := r.Context()
// supported query params: q, season, episode, page, filter_results
q := r.URL.Query().Get("q")
page := r.URL.Query().Get("page")
// URL encode query param
q = url.QueryEscape(q)
2023-10-01 20:27:44 +00:00
url := comando.URL
if q != "" {
2023-10-01 20:27:44 +00:00
url = fmt.Sprintf("%s%s%s", url, comando.SearchURL, q)
} else if page != "" {
url = fmt.Sprintf("%spage/%s", url, page)
}
fmt.Println("URL:>", url)
resp, err := i.requester.GetDocument(ctx, url)
if err != nil {
w.WriteHeader(http.StatusInternalServerError)
err = json.NewEncoder(w).Encode(map[string]string{"error": err.Error()})
if err != nil {
fmt.Println(err)
}
i.metrics.IndexerErrors.WithLabelValues("comando").Inc()
return
}
defer resp.Close()
doc, err := goquery.NewDocumentFromReader(resp)
if err != nil {
w.WriteHeader(http.StatusInternalServerError)
err = json.NewEncoder(w).Encode(map[string]string{"error": err.Error()})
if err != nil {
fmt.Println(err)
}
i.metrics.IndexerErrors.WithLabelValues("comando").Inc()
return
}
var links []string
doc.Find("article").Each(func(i int, s *goquery.Selection) {
// get link from h2.entry-title > a
link, _ := s.Find("h2.entry-title > a").Attr("href")
links = append(links, link)
})
var itChan = make(chan []schema.IndexedTorrent)
2023-09-22 22:12:38 -03:00
var errChan = make(chan error)
indexedTorrents := []schema.IndexedTorrent{}
for _, link := range links {
2023-09-22 22:12:38 -03:00
go func(link string) {
torrents, err := getTorrents(ctx, i, link)
2023-09-22 22:12:38 -03:00
if err != nil {
fmt.Println(err)
errChan <- err
}
itChan <- torrents
}(link)
}
for i := 0; i < len(links); i++ {
select {
case torrents := <-itChan:
indexedTorrents = append(indexedTorrents, torrents...)
case err := <-errChan:
fmt.Println(err)
}
}
for i, it := range indexedTorrents {
jLower := strings.ReplaceAll(strings.ToLower(fmt.Sprintf("%s %s", it.Title, it.OriginalTitle)), ".", " ")
qLower := strings.ToLower(q)
splitLength := 2
indexedTorrents[i].Similarity = edlib.JaccardSimilarity(jLower, qLower, splitLength)
}
// remove the ones with zero similarity
if len(indexedTorrents) > 20 && r.URL.Query().Get("filter_results") != "" && r.URL.Query().Get("q") != "" {
indexedTorrents = utils.Filter(indexedTorrents, func(it schema.IndexedTorrent) bool {
return it.Similarity > 0
})
}
// sort by similarity
slices.SortFunc(indexedTorrents, func(i, j schema.IndexedTorrent) int {
return int((j.Similarity - i.Similarity) * 1000)
})
// send to search index
go func() {
_ = i.search.IndexTorrents(indexedTorrents)
}()
w.Header().Set("Content-Type", "application/json")
err = json.NewEncoder(w).Encode(Response{
2024-02-12 17:04:00 +00:00
Results: indexedTorrents,
Count: len(indexedTorrents),
})
if err != nil {
fmt.Println(err)
}
}
func getTorrents(ctx context.Context, i *Indexer, link string) ([]schema.IndexedTorrent, error) {
var indexedTorrents []schema.IndexedTorrent
doc, err := getDocument(ctx, i, link)
if err != nil {
return nil, err
}
article := doc.Find("article")
2023-09-22 22:12:38 -03:00
title := strings.Replace(article.Find(".entry-title").Text(), " - Download", "", -1)
textContent := article.Find("div.entry-content")
2023-09-22 22:12:38 -03:00
// div itemprop="datePublished"
datePublished := strings.TrimSpace(article.Find("div[itemprop=\"datePublished\"]").Text())
// pattern: 10 de setembro de 2021
date, err := parseLocalizedDate(datePublished)
if err != nil {
return nil, err
2023-09-22 22:12:38 -03:00
}
magnets := textContent.Find("a[href^=\"magnet\"]")
var magnetLinks []string
magnets.Each(func(i int, s *goquery.Selection) {
magnetLink, _ := s.Attr("href")
magnetLinks = append(magnetLinks, magnetLink)
})
var audio []schema.Audio
var year string
var size []string
article.Find("div.entry-content > p").Each(func(i int, s *goquery.Selection) {
// pattern:
// Título Traduzido: Fundação
// Título Original: Foundation
// IMDb: 7,5
// Ano de Lançamento: 2023
// Gênero: Ação | Aventura | Ficção
// Formato: MKV
// Qualidade: WEB-DL
// Áudio: Português | Inglês
// Idioma: Português | Inglês
// Legenda: Português
// Tamanho:
// Qualidade de Áudio: 10
// Qualidade de Vídeo: 10
// Duração: 59 Min.
// Servidor: Torrent
2023-09-22 21:13:57 -03:00
text := s.Text()
audio = append(audio, findAudioFromText(text)...)
2024-03-10 12:48:48 +00:00
y := findYearFromText(text, title)
if y != "" {
year = y
}
size = append(size, findSizesFromText(text)...)
})
2024-02-12 17:04:00 +00:00
// find any link from imdb
imdbLink := ""
article.Find("a").Each(func(i int, s *goquery.Selection) {
2024-02-12 17:04:00 +00:00
link, _ := s.Attr("href")
_imdbLink, err := getIMDBLink(link)
if err == nil {
imdbLink = _imdbLink
2024-02-12 17:04:00 +00:00
}
})
size = stableUniq(size)
var chanIndexedTorrent = make(chan schema.IndexedTorrent)
2023-09-23 17:41:58 +00:00
// for each magnet link, create a new indexed torrent
for it, magnetLink := range magnetLinks {
it := it
go func(it int, magnetLink string) {
magnet, err := magnet.ParseMagnetUri(magnetLink)
if err != nil {
fmt.Println(err)
}
releaseTitle := magnet.DisplayName
infoHash := magnet.InfoHash.String()
trackers := magnet.Trackers
2023-09-23 17:41:58 +00:00
magnetAudio := []schema.Audio{}
2023-10-01 20:27:44 +00:00
if strings.Contains(strings.ToLower(releaseTitle), "dual") || strings.Contains(strings.ToLower(releaseTitle), "dublado") {
2023-09-23 17:41:58 +00:00
magnetAudio = append(magnetAudio, audio...)
} else if len(audio) > 1 {
// remove portuguese audio, and append to magnetAudio
for _, a := range audio {
if a != schema.AudioPortuguese {
magnetAudio = append(magnetAudio, a)
}
2023-09-22 21:13:57 -03:00
}
2023-09-23 17:41:58 +00:00
} else {
magnetAudio = append(magnetAudio, audio...)
2023-09-22 21:13:57 -03:00
}
2023-09-22 22:12:38 -03:00
peer, seed, err := goscrape.GetLeechsAndSeeds(ctx, i.redis, i.metrics, infoHash, trackers)
2023-09-23 17:41:58 +00:00
if err != nil {
fmt.Println(err)
}
title := processTitle(title, magnetAudio)
// if the number of sizes is equal to the number of magnets, then assign the size to each indexed torrent in order
var mySize string
if len(size) == len(magnetLinks) {
mySize = size[it]
}
ixt := schema.IndexedTorrent{
Title: appendAudioISO639_2Code(releaseTitle, magnetAudio),
2023-09-23 17:41:58 +00:00
OriginalTitle: title,
Details: link,
Year: year,
2024-02-12 17:04:00 +00:00
IMDB: imdbLink,
2023-09-23 17:41:58 +00:00
Audio: magnetAudio,
MagnetLink: magnetLink,
Date: date,
InfoHash: infoHash,
Trackers: trackers,
LeechCount: peer,
SeedCount: seed,
Size: mySize,
2023-09-23 17:41:58 +00:00
}
chanIndexedTorrent <- ixt
}(it, magnetLink)
2023-09-23 17:41:58 +00:00
}
2023-09-23 17:41:58 +00:00
for i := 0; i < len(magnetLinks); i++ {
it := <-chanIndexedTorrent
indexedTorrents = append(indexedTorrents, it)
}
return indexedTorrents, nil
}
func getIMDBLink(link string) (string, error) {
var imdbLink string
re := regexp.MustCompile(`https://www.imdb.com(/[a-z]{2})?/title/(tt\d+)/?`)
matches := re.FindStringSubmatch(link)
if len(matches) > 0 {
imdbLink = matches[0]
} else {
return "", fmt.Errorf("no imdb link found")
}
return imdbLink, nil
}
func parseLocalizedDate(datePublished string) (time.Time, error) {
re := regexp.MustCompile(`(\d{1,2}) de (\w+) de (\d{4})`)
matches := re.FindStringSubmatch(datePublished)
if len(matches) > 0 {
day := matches[1]
// append 0 to single digit day
if len(day) == 1 {
day = fmt.Sprintf("0%s", day)
}
month := matches[2]
year := matches[3]
datePublished = fmt.Sprintf("%s-%s-%s", year, replacer.Replace(month), day)
date, err := time.Parse("2006-01-02", datePublished)
if err != nil {
return time.Time{}, err
}
return date, nil
}
return time.Time{}, nil
}
func stableUniq(s []string) []string {
var uniq []map[string]interface{}
m := make(map[string]map[string]interface{})
for i, v := range s {
m[v] = map[string]interface{}{
"v": v,
"i": i,
}
}
// to order by index
for _, v := range m {
uniq = append(uniq, v)
}
// sort by index
for i := 0; i < len(uniq); i++ {
for j := i + 1; j < len(uniq); j++ {
if uniq[i]["i"].(int) > uniq[j]["i"].(int) {
uniq[i], uniq[j] = uniq[j], uniq[i]
}
}
}
// get only values
var uniqValues []string
for _, v := range uniq {
uniqValues = append(uniqValues, v["v"].(string))
}
return uniqValues
}
func findYearFromText(text string, title string) (year string) {
re := regexp.MustCompile(`Lançamento: (.*)`)
yearMatch := re.FindStringSubmatch(text)
if len(yearMatch) > 0 {
year = yearMatch[1]
}
if year == "" {
re = regexp.MustCompile(`\((\d{4})\)`)
yearMatch := re.FindStringSubmatch(title)
if len(yearMatch) > 0 {
year = yearMatch[1]
}
}
return strings.TrimSpace(year)
}
func findAudioFromText(text string) []schema.Audio {
var audio []schema.Audio
re := regexp.MustCompile(`(.udio|Idioma):.?(.*)`)
audioMatch := re.FindStringSubmatch(text)
if len(audioMatch) > 0 {
sep := getSeparator(audioMatch[2])
langs_raw := strings.Split(audioMatch[2], sep)
for _, lang := range langs_raw {
lang = strings.TrimSpace(lang)
a := schema.GetAudioFromString(lang)
if a != nil {
audio = append(audio, *a)
} else {
fmt.Println("unknown language:", lang)
}
}
}
return audio
}
func findSizesFromText(text string) []string {
var sizes []string
// everything that ends with GB or MB, using ',' or '.' as decimal separator
re := regexp.MustCompile(`(\d+[\.,]?\d+) ?(GB|MB)`)
sizesMatch := re.FindAllStringSubmatch(text, -1)
if len(sizesMatch) > 0 {
for _, size := range sizesMatch {
sizes = append(sizes, size[0])
}
}
return sizes
}
func processTitle(title string, a []schema.Audio) string {
// remove ' - Donwload' from title
title = strings.Replace(title, " Download", "", -1)
// remove 'comando.la' from title
title = strings.Replace(title, "comando.la", "", -1)
// add audio ISO 639-2 code to title between ()
title = appendAudioISO639_2Code(title, a)
return title
}
func appendAudioISO639_2Code(title string, a []schema.Audio) string {
if len(a) > 0 {
audio := []string{}
for _, lang := range a {
audio = append(audio, lang.String())
}
title = fmt.Sprintf("%s (%s)", title, strings.Join(audio, ", "))
}
return title
}
func getSeparator(s string) string {
if strings.Contains(s, "|") {
return "|"
} else if strings.Contains(s, ",") {
return ","
}
return " "
}
func getDocument(ctx context.Context, i *Indexer, link string) (*goquery.Document, error) {
// try to get from redis first
docCache, err := i.redis.Get(ctx, link)
if err == nil {
i.metrics.CacheHits.WithLabelValues("document_body").Inc()
fmt.Printf("returning from long-lived cache: %s\n", link)
return goquery.NewDocumentFromReader(io.NopCloser(bytes.NewReader(docCache)))
}
defer i.metrics.CacheMisses.WithLabelValues("document_body").Inc()
resp, err := i.requester.GetDocument(ctx, link)
if err != nil {
return nil, err
}
defer resp.Close()
body, err := io.ReadAll(resp)
if err != nil {
return nil, err
}
// set cache
err = i.redis.Set(ctx, link, body)
if err != nil {
fmt.Println(err)
}
doc, err := goquery.NewDocumentFromReader(io.NopCloser(bytes.NewReader(body)))
if err != nil {
return nil, err
}
return doc, nil
}