2023-09-23 17:02:55 +00:00
|
|
|
|
package handler
|
2023-09-22 23:50:22 +00:00
|
|
|
|
|
|
|
|
|
|
import (
|
2023-09-23 17:02:55 +00:00
|
|
|
|
"bytes"
|
|
|
|
|
|
"context"
|
2023-09-22 23:50:22 +00:00
|
|
|
|
"encoding/json"
|
|
|
|
|
|
"fmt"
|
2023-09-23 17:02:55 +00:00
|
|
|
|
"io/ioutil"
|
2023-09-22 23:50:22 +00:00
|
|
|
|
"net/http"
|
|
|
|
|
|
"net/url"
|
|
|
|
|
|
"regexp"
|
2023-09-22 21:13:57 -03:00
|
|
|
|
"strings"
|
2023-09-22 22:12:38 -03:00
|
|
|
|
"time"
|
2023-09-22 23:50:22 +00:00
|
|
|
|
|
|
|
|
|
|
"github.com/PuerkitoBio/goquery"
|
2023-09-23 17:02:55 +00:00
|
|
|
|
"github.com/felipemarinho97/torrent-indexer/schema"
|
|
|
|
|
|
goscrape "github.com/felipemarinho97/torrent-indexer/scrape"
|
2023-09-22 23:50:22 +00:00
|
|
|
|
)
|
|
|
|
|
|
|
|
|
|
|
|
// Site-specific constants for the comando.la indexer.
const (
	// URL is the base address of the comando.la site being scraped.
	URL = "https://comando.la/"
	// queryFilter is the query-string prefix appended to URL for searches.
	queryFilter = "?s="
)
|
|
|
|
|
|
|
2023-09-22 22:12:38 -03:00
|
|
|
|
// replacer maps Portuguese month names to their two-digit numeric form so
// that scraped publication dates (e.g. "10 de setembro de 2021") can be
// normalized and parsed with time.Parse.
var replacer = strings.NewReplacer(
	"janeiro", "01",
	"fevereiro", "02",
	"março", "03",
	"abril", "04",
	"maio", "05",
	"junho", "06",
	"julho", "07",
	"agosto", "08",
	"setembro", "09",
	"outubro", "10",
	"novembro", "11",
	"dezembro", "12",
)
|
|
|
|
|
|
|
2023-09-22 23:50:22 +00:00
|
|
|
|
// IndexedTorrent is one torrent scraped from a comando.la article page,
// enriched with leech/seed counts obtained by scraping its trackers.
type IndexedTorrent struct {
	// Title is the release name taken from the magnet link's "dn" parameter.
	Title string `json:"title"`
	// OriginalTitle is the processed article title (see processTitle).
	OriginalTitle string `json:"original_title"`
	// Details is the URL of the article the torrent was scraped from.
	Details string `json:"details"`
	// Year is the release year scraped from the article body or title.
	Year string `json:"year"`
	// Audio lists the audio languages detected for this specific magnet.
	Audio []schema.Audio `json:"audio"`
	MagnetLink string `json:"magnet_link"`
	// Date is the article's publication date (zero value if not found).
	Date time.Time `json:"date"`
	// InfoHash is the BitTorrent info hash extracted from the magnet link.
	InfoHash string `json:"info_hash"`
	// Trackers are the URL-decoded "tr" parameters of the magnet link.
	Trackers []string `json:"trackers"`
	// LeechCount and SeedCount come from goscrape.GetLeechsAndSeeds.
	LeechCount int `json:"leech_count"`
	SeedCount int `json:"seed_count"`
}
|
|
|
|
|
|
|
2023-09-23 17:02:55 +00:00
|
|
|
|
func (i *Indexer) HandlerComandoIndexer(w http.ResponseWriter, r *http.Request) {
|
|
|
|
|
|
ctx := r.Context()
|
2023-09-22 23:50:22 +00:00
|
|
|
|
// supported query params: q, season, episode
|
|
|
|
|
|
q := r.URL.Query().Get("q")
|
|
|
|
|
|
|
|
|
|
|
|
// URL encode query param
|
|
|
|
|
|
q = url.QueryEscape(q)
|
2023-09-23 17:02:55 +00:00
|
|
|
|
url := URL
|
|
|
|
|
|
if q != "" {
|
|
|
|
|
|
url = fmt.Sprintf("%s%s%s", URL, queryFilter, q)
|
|
|
|
|
|
}
|
2023-09-22 23:50:22 +00:00
|
|
|
|
|
|
|
|
|
|
fmt.Println("URL:>", url)
|
|
|
|
|
|
resp, err := http.Get(url)
|
|
|
|
|
|
if err != nil {
|
|
|
|
|
|
w.WriteHeader(http.StatusInternalServerError)
|
|
|
|
|
|
json.NewEncoder(w).Encode(map[string]string{"error": err.Error()})
|
|
|
|
|
|
return
|
|
|
|
|
|
}
|
|
|
|
|
|
defer resp.Body.Close()
|
|
|
|
|
|
|
|
|
|
|
|
doc, err := goquery.NewDocumentFromReader(resp.Body)
|
|
|
|
|
|
if err != nil {
|
|
|
|
|
|
w.WriteHeader(http.StatusInternalServerError)
|
|
|
|
|
|
json.NewEncoder(w).Encode(map[string]string{"error": err.Error()})
|
|
|
|
|
|
return
|
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
|
var links []string
|
|
|
|
|
|
doc.Find("article").Each(func(i int, s *goquery.Selection) {
|
|
|
|
|
|
// get link from h2.entry-title > a
|
|
|
|
|
|
link, _ := s.Find("h2.entry-title > a").Attr("href")
|
|
|
|
|
|
links = append(links, link)
|
|
|
|
|
|
})
|
|
|
|
|
|
|
2023-09-22 22:12:38 -03:00
|
|
|
|
var itChan = make(chan []IndexedTorrent)
|
|
|
|
|
|
var errChan = make(chan error)
|
2023-09-22 23:50:22 +00:00
|
|
|
|
var indexedTorrents []IndexedTorrent
|
|
|
|
|
|
for _, link := range links {
|
2023-09-22 22:12:38 -03:00
|
|
|
|
go func(link string) {
|
2023-09-23 17:02:55 +00:00
|
|
|
|
torrents, err := getTorrents(ctx, i, link)
|
2023-09-22 22:12:38 -03:00
|
|
|
|
if err != nil {
|
|
|
|
|
|
fmt.Println(err)
|
|
|
|
|
|
errChan <- err
|
|
|
|
|
|
}
|
|
|
|
|
|
itChan <- torrents
|
|
|
|
|
|
}(link)
|
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
|
for i := 0; i < len(links); i++ {
|
|
|
|
|
|
select {
|
|
|
|
|
|
case torrents := <-itChan:
|
|
|
|
|
|
indexedTorrents = append(indexedTorrents, torrents...)
|
|
|
|
|
|
case err := <-errChan:
|
2023-09-22 23:50:22 +00:00
|
|
|
|
fmt.Println(err)
|
|
|
|
|
|
}
|
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
|
w.Header().Set("Content-Type", "application/json")
|
|
|
|
|
|
json.NewEncoder(w).Encode(indexedTorrents)
|
|
|
|
|
|
}
|
|
|
|
|
|
|
2023-09-23 17:02:55 +00:00
|
|
|
|
// getTorrents scrapes a single comando.la article page and returns one
// IndexedTorrent per magnet link found in it. The document is fetched
// through getDocument (redis-cached). Tracker scrape failures are logged
// but do not abort indexing; a date-parse failure does.
func getTorrents(ctx context.Context, i *Indexer, link string) ([]IndexedTorrent, error) {
	var indexedTorrents []IndexedTorrent
	doc, err := getDocument(ctx, i, link)
	if err != nil {
		return nil, err
	}

	article := doc.Find("article")
	title := strings.Replace(article.Find(".entry-title").Text(), " - Download", "", -1)
	textContent := article.Find("div.entry-content")
	// div itemprop="datePublished"
	datePublished := strings.TrimSpace(article.Find("div[itemprop=\"datePublished\"]").Text())
	// pattern: 10 de setembro de 2021
	// NOTE(review): \d{2} assumes the day is always two digits — a
	// single-digit day ("1 de setembro de 2021") would not match and the
	// date would stay at its zero value. Confirm against live pages.
	re := regexp.MustCompile(`(\d{2}) de (\w+) de (\d{4})`)
	matches := re.FindStringSubmatch(datePublished)
	var date time.Time
	if len(matches) > 0 {
		day := matches[1]
		month := matches[2]
		year := matches[3]
		// rewrite as ISO "YYYY-MM-DD" using the month-name replacer
		datePublished = fmt.Sprintf("%s-%s-%s", year, replacer.Replace(month), day)
		date, err = time.Parse("2006-01-02", datePublished)
		if err != nil {
			return nil, err
		}
	}
	// all magnet anchors in the article body
	magnets := textContent.Find("a[href^=\"magnet\"]")
	var magnetLinks []string
	magnets.Each(func(i int, s *goquery.Selection) {
		magnetLink, _ := s.Attr("href")
		magnetLinks = append(magnetLinks, magnetLink)
	})

	var audio []schema.Audio
	var year string
	// scan the article's metadata paragraphs for audio languages and year
	article.Find("div.entry-content > p").Each(func(i int, s *goquery.Selection) {
		// pattern:
		// Título Traduzido: Fundação
		// Título Original: Foundation
		// IMDb: 7,5
		// Ano de Lançamento: 2023
		// Gênero: Ação | Aventura | Ficção
		// Formato: MKV
		// Qualidade: WEB-DL
		// Áudio: Português | Inglês
		// Idioma: Português | Inglês
		// Legenda: Português
		// Tamanho: –
		// Qualidade de Áudio: 10
		// Qualidade de Vídeo: 10
		// Duração: 59 Min.
		// Servidor: Torrent
		text := s.Text()

		//re := regexp.MustCompile(`Áudio: (.*)`)
		re := regexp.MustCompile(`(Áudio|Idioma): (.*)`)
		audioMatch := re.FindStringSubmatch(text)
		if len(audioMatch) > 0 {
			// language lists may be separated by "|", "," or spaces
			sep := getSeparator(audioMatch[2])
			langs_raw := strings.Split(audioMatch[2], sep)
			for _, lang := range langs_raw {
				lang = strings.TrimSpace(lang)
				a := schema.GetAudioFromString(lang)
				if a != nil {
					audio = append(audio, *a)
				} else {
					fmt.Println("unknown language:", lang)
				}
			}
		}

		// matches both "Lançamento:" and "Ano de Lançamento:"
		re = regexp.MustCompile(`Lançamento: (.*)`)
		yearMatch := re.FindStringSubmatch(text)
		if len(yearMatch) > 0 {
			year = yearMatch[1]
		}

		// if year is empty, try to get it from title
		if year == "" {
			re = regexp.MustCompile(`\((\d{4})\)`)
			yearMatch := re.FindStringSubmatch(title)
			if len(yearMatch) > 0 {
				year = yearMatch[1]
			}
		}
	})

	// for each magnet link, create a new indexed torrent
	for _, magnetLink := range magnetLinks {
		releaseTitle := extractReleaseName(magnetLink)
		magnetAudio := []schema.Audio{}
		// "dual" releases keep every detected language; otherwise a
		// multi-language page implies this magnet is the non-Portuguese one
		if strings.Contains(strings.ToLower(releaseTitle), "dual") {
			magnetAudio = append(magnetAudio, audio...)
		} else if len(audio) > 1 {
			// remove portuguese audio, and append to magnetAudio
			for _, a := range audio {
				if a != schema.AudioPortuguese {
					magnetAudio = append(magnetAudio, a)
				}
			}
		} else {
			magnetAudio = append(magnetAudio, audio...)
		}
		// decode url encoded title
		releaseTitle, _ = url.QueryUnescape(releaseTitle)

		infoHash := extractInfoHash(magnetLink)
		trackers := extractTrackers(magnetLink)
		// best-effort: scrape failures only log, counts stay zero
		peer, seed, err := goscrape.GetLeechsAndSeeds(ctx, i.redis, infoHash, trackers)
		if err != nil {
			fmt.Println(err)
		}

		title := processTitle(title, magnetAudio)

		indexedTorrents = append(indexedTorrents, IndexedTorrent{
			Title:         releaseTitle,
			OriginalTitle: title,
			Details:       link,
			Year:          year,
			Audio:         magnetAudio,
			MagnetLink:    magnetLink,
			Date:          date,
			InfoHash:      infoHash,
			Trackers:      trackers,
			LeechCount:    peer,
			SeedCount:     seed,
		})
	}

	return indexedTorrents, nil
}
|
|
|
|
|
|
|
2023-09-23 17:02:55 +00:00
|
|
|
|
func processTitle(title string, a []schema.Audio) string {
|
|
|
|
|
|
// remove ' - Donwload' from title
|
|
|
|
|
|
title = strings.Replace(title, " - Download", "", -1)
|
|
|
|
|
|
|
|
|
|
|
|
// remove 'comando.la' from title
|
|
|
|
|
|
title = strings.Replace(title, "comando.la", "", -1)
|
|
|
|
|
|
|
|
|
|
|
|
// add audio ISO 639-2 code to title between ()
|
|
|
|
|
|
if len(a) > 0 {
|
|
|
|
|
|
audio := []string{}
|
|
|
|
|
|
for _, lang := range a {
|
|
|
|
|
|
audio = append(audio, lang.String())
|
|
|
|
|
|
}
|
|
|
|
|
|
title = fmt.Sprintf("%s (%s)", title, strings.Join(audio, ", "))
|
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
|
return title
|
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
|
// getSeparator guesses the delimiter used in a scraped language list:
// a pipe when present, otherwise a comma, otherwise a single space.
func getSeparator(s string) string {
	switch {
	case strings.Contains(s, "|"):
		return "|"
	case strings.Contains(s, ","):
		return ","
	default:
		return " "
	}
}
|
|
|
|
|
|
|
|
|
|
|
|
func getDocument(ctx context.Context, i *Indexer, link string) (*goquery.Document, error) {
|
|
|
|
|
|
// try to get from redis first
|
|
|
|
|
|
docCache, err := i.redis.Get(ctx, link)
|
|
|
|
|
|
if err == nil {
|
|
|
|
|
|
return goquery.NewDocumentFromReader(ioutil.NopCloser(bytes.NewReader(docCache)))
|
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
|
resp, err := http.Get(link)
|
|
|
|
|
|
if err != nil {
|
|
|
|
|
|
return nil, err
|
|
|
|
|
|
}
|
|
|
|
|
|
defer resp.Body.Close()
|
|
|
|
|
|
|
|
|
|
|
|
body, err := ioutil.ReadAll(resp.Body)
|
|
|
|
|
|
if err != nil {
|
|
|
|
|
|
return nil, err
|
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
|
// set cache
|
|
|
|
|
|
err = i.redis.Set(ctx, link, body)
|
|
|
|
|
|
if err != nil {
|
|
|
|
|
|
fmt.Println(err)
|
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
|
doc, err := goquery.NewDocumentFromReader(ioutil.NopCloser(bytes.NewReader(body)))
|
|
|
|
|
|
if err != nil {
|
|
|
|
|
|
return nil, err
|
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
|
return doc, nil
|
|
|
|
|
|
}
|
|
|
|
|
|
|
2023-09-22 23:50:22 +00:00
|
|
|
|
// releaseNameRe captures the "dn" (display name) parameter of a magnet link.
// [^&]* replaces the previous lazy (.*?)& so that a dn parameter at the very
// end of the link (no trailing '&') is still matched; the regex is compiled
// once at package scope instead of on every call.
var releaseNameRe = regexp.MustCompile(`dn=([^&]*)`)

// extractReleaseName returns the (still URL-encoded) release name from a
// magnet link's "dn" parameter, or "" when the link has none.
func extractReleaseName(magnetLink string) string {
	matches := releaseNameRe.FindStringSubmatch(magnetLink)
	if len(matches) > 0 {
		return matches[1]
	}
	return ""
}
|
2023-09-22 21:13:57 -03:00
|
|
|
|
|
2023-09-22 22:12:38 -03:00
|
|
|
|
// infoHashRe captures the info hash following "btih:" in a magnet link.
// [^&]* replaces the previous lazy (.*?)& so the hash is found even when it
// is the last component of the link (no trailing '&'); compiled once at
// package scope instead of per call.
var infoHashRe = regexp.MustCompile(`btih:([^&]*)`)

// extractInfoHash returns the BitTorrent info hash from a magnet link,
// or "" when none is present.
func extractInfoHash(magnetLink string) string {
	matches := infoHashRe.FindStringSubmatch(magnetLink)
	if len(matches) > 0 {
		return matches[1]
	}
	return ""
}
|
|
|
|
|
|
|
2023-09-23 17:02:55 +00:00
|
|
|
|
// trackerRe captures each "tr" (tracker) parameter of a magnet link.
// [^&]* replaces the previous lazy (.*?)&, which silently dropped the final
// tracker when the link did not end with '&'; compiled once at package scope.
var trackerRe = regexp.MustCompile(`tr=([^&]*)`)

// extractTrackers returns every tracker URL found in a magnet link's "tr"
// parameters, URL-decoded. Returns nil when the link has no trackers.
func extractTrackers(magnetLink string) []string {
	matches := trackerRe.FindAllStringSubmatch(magnetLink, -1)
	var trackers []string
	for _, match := range matches {
		// url decode; an undecodable tracker is kept as the empty string
		tracker, _ := url.QueryUnescape(match[1])
		trackers = append(trackers, tracker)
	}
	return trackers
}
|