diff --git a/app/app.go b/app/app.go
new file mode 100644
index 000000000..0f2026796
--- /dev/null
+++ b/app/app.go
@@ -0,0 +1,338 @@
+package app
+
+import (
+ "encoding/json"
+ "errors"
+ "fmt"
+ "io/ioutil"
+ "os"
+ "sort"
+ "strings"
+
+ "github.com/fatih/color"
+ "github.com/go-rod/rod"
+ "github.com/urfave/cli/v2"
+
+ "github.com/iawia002/lux/cookier"
+ "github.com/iawia002/lux/downloader"
+ "github.com/iawia002/lux/extractors"
+ "github.com/iawia002/lux/request"
+ "github.com/iawia002/lux/utils"
+)
+
+const (
+ // Name is the name of this app.
+ Name = "lux"
+ version = "v0.12.0"
+)
+
+func init() {
+ cli.VersionPrinter = func(c *cli.Context) {
+ blue := color.New(color.FgBlue)
+ cyan := color.New(color.FgCyan)
+ fmt.Fprintf(
+ color.Output,
+ "\n%s: version %s, A fast and simple video downloader.\n\n",
+ cyan.Sprint(Name),
+ blue.Sprint(c.App.Version),
+ )
+ }
+}
+
+// New returns the App instance.
+func New() *cli.App {
+ app := &cli.App{
+ Name: Name,
+ Usage: "A fast and simple video downloader.",
+ Version: version,
+ Flags: []cli.Flag{
+ &cli.BoolFlag{
+ Name: "debug",
+ Aliases: []string{"d"},
+ Usage: "Debug mode",
+ },
+ &cli.BoolFlag{
+ Name: "silent",
+ Aliases: []string{"s"},
+ Usage: "Minimum outputs",
+ },
+ &cli.BoolFlag{
+ Name: "info",
+ Aliases: []string{"i"},
+ Usage: "Information only",
+ },
+ &cli.BoolFlag{
+ Name: "json",
+ Aliases: []string{"j"},
+ Usage: "Print extracted JSON data",
+ },
+
+ &cli.StringFlag{
+ Name: "cookie",
+ Aliases: []string{"c"},
+ Usage: "Cookie",
+ },
+ &cli.BoolFlag{
+ Name: "playlist",
+ Aliases: []string{"p"},
+ Usage: "Download playlist",
+ },
+ &cli.StringFlag{
+ Name: "user-agent",
+ Aliases: []string{"u"},
+ Usage: "Use specified User-Agent",
+ },
+ &cli.StringFlag{
+ Name: "refer",
+ Aliases: []string{"r"},
+ Usage: "Use specified Referrer",
+ },
+ &cli.StringFlag{
+ Name: "stream-format",
+ Aliases: []string{"f"},
+ Usage: "Select specific stream to download",
+ },
+ &cli.StringFlag{
+ Name: "file",
+ Aliases: []string{"F"},
+ Usage: "URLs file path",
+ },
+ &cli.StringFlag{
+ Name: "output-path",
+ Aliases: []string{"o"},
+ Usage: "Specify the output path",
+ },
+ &cli.StringFlag{
+ Name: "output-name",
+ Aliases: []string{"O"},
+ Usage: "Specify the output file name",
+ },
+ &cli.UintFlag{
+ Name: "file-name-length",
+ Value: 255,
+ Usage: "The maximum length of a file name, 0 means unlimited",
+ },
+ &cli.BoolFlag{
+ Name: "caption",
+ Aliases: []string{"C"},
+ Usage: "Download captions",
+ },
+
+ &cli.UintFlag{
+ Name: "start",
+ Value: 1,
+ Usage: "Define the starting item of a playlist or a file input",
+ },
+ &cli.UintFlag{
+ Name: "end",
+ Value: 0,
+ Usage: "Define the ending item of a playlist or a file input",
+ },
+ &cli.StringFlag{
+ Name: "items",
+ Usage: "Define wanted items from a file or playlist. Separated by commas like: 1,5,6,8-10",
+ },
+
+ &cli.BoolFlag{
+ Name: "multi-thread",
+ Aliases: []string{"m"},
+ Usage: "Multiple threads to download single video",
+ },
+ &cli.UintFlag{
+ Name: "retry",
+ Value: 10,
+ Usage: "How many times to retry when the download failed",
+ },
+ &cli.UintFlag{
+ Name: "chunk-size",
+ Aliases: []string{"cs"},
+ Value: 1,
+ Usage: "HTTP chunk size for downloading (in MB)",
+ },
+ &cli.UintFlag{
+ Name: "thread",
+ Aliases: []string{"n"},
+ Value: 10,
+ Usage: "The number of download thread (only works for multiple-parts video)",
+ },
+
+ // Aria2
+ &cli.BoolFlag{
+ Name: "aria2",
+ Usage: "Use Aria2 RPC to download",
+ },
+ &cli.StringFlag{
+ Name: "aria2-token",
+ Usage: "Aria2 RPC Token",
+ },
+ &cli.StringFlag{
+ Name: "aria2-addr",
+ Value: "localhost:6800",
+ Usage: "Aria2 Address",
+ },
+ &cli.StringFlag{
+ Name: "aria2-method",
+ Value: "http",
+ Usage: "Aria2 Method",
+ },
+
+ // youku
+ &cli.StringFlag{
+ Name: "youku-ccode",
+ Aliases: []string{"ccode"},
+ Value: "0532",
+ Usage: "Youku ccode",
+ },
+ &cli.StringFlag{
+ Name: "youku-ckey",
+ Aliases: []string{"ckey"},
+ Value: "7B19C0AB12633B22E7FE81271162026020570708D6CC189E4924503C49D243A0DE6CD84A766832C2C99898FC5ED31F3709BB3CDD82C96492E721BDD381735026",
+ Usage: "Youku ckey",
+ },
+ &cli.StringFlag{
+ Name: "youku-password",
+ Aliases: []string{"password"},
+ Usage: "Youku password",
+ },
+
+ &cli.BoolFlag{
+ Name: "episode-title-only",
+ Aliases: []string{"eto"},
+ Usage: "File name of each bilibili episode doesn't include the playlist title",
+ },
+ },
+ Action: func(c *cli.Context) error {
+ args := c.Args().Slice()
+
+ if c.Bool("debug") {
+ cli.VersionPrinter(c)
+ }
+
+ if file := c.String("file"); file != "" {
+ f, err := os.Open(file)
+ if err != nil {
+ return err
+ }
+ defer f.Close() // nolint
+
+ fileItems := utils.ParseInputFile(f, c.String("items"), int(c.Uint("start")), int(c.Uint("end")))
+ args = append(args, fileItems...)
+ }
+
+ if len(args) < 1 {
+ return errors.New("too few arguments")
+ }
+
+ cookie := c.String("cookie")
+ if cookie != "" {
+ // If cookie is a file path, convert it to a string to ensure cookie is always string
+ if _, fileErr := os.Stat(cookie); fileErr == nil {
+ // Cookie is a file
+ data, err := ioutil.ReadFile(cookie)
+ if err != nil {
+ return err
+ }
+ cookie = strings.TrimSpace(string(data))
+ }
+ } else {
+ // Try to use current user's cookie if possible, if failed empty cookie will be used
+ _ = rod.Try(func() {
+ cookie = cookier.Get(args...)
+ })
+ }
+
+ request.SetOptions(request.Options{
+ RetryTimes: int(c.Uint("retry")),
+ Cookie: cookie,
+ UserAgent: c.String("user-agent"),
+ Refer: c.String("refer"),
+ Debug: c.Bool("debug"),
+ Silent: c.Bool("silent"),
+ })
+
+ var isErr bool
+ for _, videoURL := range args {
+ if err := download(c, videoURL); err != nil {
+ fmt.Fprintf(
+ color.Output,
+ "Downloading %s error:\n%s\n",
+ color.CyanString("%s", videoURL), color.RedString("%v", err),
+ )
+ isErr = true
+ }
+ }
+ if isErr {
+ return cli.Exit("", 1)
+ }
+ return nil
+ },
+ EnableBashCompletion: true,
+ }
+
+ sort.Sort(cli.FlagsByName(app.Flags))
+ return app
+}
+
+func download(c *cli.Context, videoURL string) error {
+ data, err := extractors.Extract(videoURL, extractors.Options{
+ Playlist: c.Bool("playlist"),
+ Items: c.String("items"),
+ ItemStart: int(c.Uint("start")),
+ ItemEnd: int(c.Uint("end")),
+ ThreadNumber: int(c.Uint("thread")),
+ EpisodeTitleOnly: c.Bool("episode-title-only"),
+ Cookie: c.String("cookie"),
+ YoukuCcode: c.String("youku-ccode"),
+ YoukuCkey: c.String("youku-ckey"),
+ YoukuPassword: c.String("youku-password"),
+ })
+ if err != nil {
+ // if this error occurs, it means that an error occurred before actually starting to extract data
+ // (there is an error in the preparation step), and the data list is empty.
+ return err
+ }
+
+ if c.Bool("json") {
+ jsonData, err := json.MarshalIndent(data, "", "\t")
+ if err != nil {
+ return err
+ }
+ fmt.Printf("%s\n", jsonData)
+ return nil
+ }
+
+ defaultDownloader := downloader.New(downloader.Options{
+ Silent: c.Bool("silent"),
+ InfoOnly: c.Bool("info"),
+ Stream: c.String("stream-format"),
+ Refer: c.String("refer"),
+ OutputPath: c.String("output-path"),
+ OutputName: c.String("output-name"),
+ FileNameLength: int(c.Uint("file-name-length")),
+ Caption: c.Bool("caption"),
+ MultiThread: c.Bool("multi-thread"),
+ ThreadNumber: int(c.Uint("thread")),
+ RetryTimes: int(c.Uint("retry")),
+ ChunkSizeMB: int(c.Uint("chunk-size")),
+ UseAria2RPC: c.Bool("aria2"),
+ Aria2Token: c.String("aria2-token"),
+ Aria2Method: c.String("aria2-method"),
+ Aria2Addr: c.String("aria2-addr"),
+ })
+ errs := make([]error, 0)
+ for _, item := range data {
+ if item.Err != nil {
+ // if this error occurs, the preparation step is normal, but the data extraction is wrong.
+ // the data is an empty struct.
+ errs = append(errs, item.Err)
+ continue
+ }
+ if err = defaultDownloader.Download(item); err != nil {
+ errs = append(errs, err)
+ }
+ }
+ if len(errs) != 0 {
+ return errs[0]
+ }
+ return nil
+}
diff --git a/app/register.go b/app/register.go
new file mode 100644
index 000000000..4adaad0b2
--- /dev/null
+++ b/app/register.go
@@ -0,0 +1,37 @@
+package app
+
+import (
+ _ "github.com/iawia002/lux/extractors/acfun"
+ _ "github.com/iawia002/lux/extractors/bcy"
+ _ "github.com/iawia002/lux/extractors/bilibili"
+ _ "github.com/iawia002/lux/extractors/douyin"
+ _ "github.com/iawia002/lux/extractors/douyu"
+ _ "github.com/iawia002/lux/extractors/eporner"
+ _ "github.com/iawia002/lux/extractors/facebook"
+ _ "github.com/iawia002/lux/extractors/geekbang"
+ _ "github.com/iawia002/lux/extractors/haokan"
+ _ "github.com/iawia002/lux/extractors/hupu"
+ _ "github.com/iawia002/lux/extractors/huya"
+ _ "github.com/iawia002/lux/extractors/instagram"
+ _ "github.com/iawia002/lux/extractors/iqiyi"
+ _ "github.com/iawia002/lux/extractors/mgtv"
+ _ "github.com/iawia002/lux/extractors/miaopai"
+ _ "github.com/iawia002/lux/extractors/netease"
+ _ "github.com/iawia002/lux/extractors/pixivision"
+ _ "github.com/iawia002/lux/extractors/pornhub"
+ _ "github.com/iawia002/lux/extractors/qq"
+ _ "github.com/iawia002/lux/extractors/streamtape"
+ _ "github.com/iawia002/lux/extractors/tangdou"
+ _ "github.com/iawia002/lux/extractors/tiktok"
+ _ "github.com/iawia002/lux/extractors/tumblr"
+ _ "github.com/iawia002/lux/extractors/twitter"
+ _ "github.com/iawia002/lux/extractors/udn"
+ _ "github.com/iawia002/lux/extractors/universal"
+ _ "github.com/iawia002/lux/extractors/vimeo"
+ _ "github.com/iawia002/lux/extractors/weibo"
+ _ "github.com/iawia002/lux/extractors/ximalaya"
+ _ "github.com/iawia002/lux/extractors/xvideos"
+ _ "github.com/iawia002/lux/extractors/yinyuetai"
+ _ "github.com/iawia002/lux/extractors/youku"
+ _ "github.com/iawia002/lux/extractors/youtube"
+)
diff --git a/downloader/downloader.go b/downloader/downloader.go
index 81f6540ef..48221a8d2 100644
--- a/downloader/downloader.go
+++ b/downloader/downloader.go
@@ -17,7 +17,7 @@ import (
"github.com/cheggaaa/pb/v3"
- "github.com/iawia002/lux/extractors/types"
+ "github.com/iawia002/lux/extractors"
"github.com/iawia002/lux/request"
"github.com/iawia002/lux/utils"
)
@@ -118,7 +118,7 @@ func (downloader *Downloader) writeFile(url string, file *os.File, headers map[s
return written, nil
}
-func (downloader *Downloader) save(part *types.Part, refer, fileName string) error {
+func (downloader *Downloader) save(part *extractors.Part, refer, fileName string) error {
filePath, err := utils.FilePath(fileName, part.Ext, downloader.option.FileNameLength, downloader.option.OutputPath, false)
if err != nil {
return err
@@ -216,7 +216,7 @@ func (downloader *Downloader) save(part *types.Part, refer, fileName string) err
return nil
}
-func (downloader *Downloader) multiThreadSave(dataPart *types.Part, refer, fileName string) error {
+func (downloader *Downloader) multiThreadSave(dataPart *extractors.Part, refer, fileName string) error {
filePath, err := utils.FilePath(fileName, dataPart.Ext, downloader.option.FileNameLength, downloader.option.OutputPath, false)
if err != nil {
return err
@@ -501,7 +501,7 @@ func mergeMultiPart(filepath string, parts []*FilePartMeta) error {
return err
}
-func (downloader *Downloader) aria2(title string, stream *types.Stream) error {
+func (downloader *Downloader) aria2(title string, stream *extractors.Stream) error {
rpcData := Aria2RPCData{
JSONRPC: "2.0",
ID: "lux", // can be modified
@@ -543,7 +543,7 @@ func (downloader *Downloader) aria2(title string, stream *types.Stream) error {
}
// Download download urls
-func (downloader *Downloader) Download(data *types.Data) error {
+func (downloader *Downloader) Download(data *extractors.Data) error {
if len(data.Streams) == 0 {
return fmt.Errorf("no streams in title %s", data.Title)
}
@@ -642,7 +642,7 @@ func (downloader *Downloader) Download(data *types.Data) error {
parts[index] = partFilePath
wgp.Add()
- go func(part *types.Part, fileName string) {
+ go func(part *extractors.Part, fileName string) {
defer wgp.Done()
err := downloader.save(part, data.URL, fileName)
if err != nil {
@@ -658,7 +658,7 @@ func (downloader *Downloader) Download(data *types.Data) error {
}
downloader.bar.Finish()
- if data.Type != types.DataTypeVideo {
+ if data.Type != extractors.DataTypeVideo {
return nil
}
diff --git a/downloader/downloader_test.go b/downloader/downloader_test.go
index 98e962a95..ff281d406 100644
--- a/downloader/downloader_test.go
+++ b/downloader/downloader_test.go
@@ -3,25 +3,25 @@ package downloader
import (
"testing"
- "github.com/iawia002/lux/extractors/types"
+ "github.com/iawia002/lux/extractors"
)
func TestDownload(t *testing.T) {
testCases := []struct {
name string
- data *types.Data
+ data *extractors.Data
}{
{
name: "normal test",
- data: &types.Data{
+ data: &extractors.Data{
Site: "douyin",
Title: "test",
- Type: types.DataTypeVideo,
+ Type: extractors.DataTypeVideo,
URL: "https://www.douyin.com",
- Streams: map[string]*types.Stream{
+ Streams: map[string]*extractors.Stream{
"default": {
ID: "default",
- Parts: []*types.Part{
+ Parts: []*extractors.Part{
{
URL: "https://aweme.snssdk.com/aweme/v1/playwm/?video_id=v0200f9a0000bc117isuatl67cees890&line=0",
Size: 4927877,
@@ -34,15 +34,15 @@ func TestDownload(t *testing.T) {
},
{
name: "multi-stream test",
- data: &types.Data{
+ data: &extractors.Data{
Site: "douyin",
Title: "test2",
- Type: types.DataTypeVideo,
+ Type: extractors.DataTypeVideo,
URL: "https://www.douyin.com",
- Streams: map[string]*types.Stream{
+ Streams: map[string]*extractors.Stream{
"miaopai": {
ID: "miaopai",
- Parts: []*types.Part{
+ Parts: []*extractors.Part{
{
URL: "https://txycdn.miaopai.com/stream/KwR26jUGh2ySnVjYbQiFmomNjP14LtMU3vi6sQ__.mp4?ssig=6594aa01a78e78f50c65c164d186ba9e&time_stamp=1537070910786",
Size: 4011590,
@@ -53,7 +53,7 @@ func TestDownload(t *testing.T) {
},
"douyin": {
ID: "douyin",
- Parts: []*types.Part{
+ Parts: []*extractors.Part{
{
URL: "https://aweme.snssdk.com/aweme/v1/playwm/?video_id=v0200f9a0000bc117isuatl67cees890&line=0",
Size: 4927877,
@@ -67,15 +67,15 @@ func TestDownload(t *testing.T) {
},
{
name: "image test",
- data: &types.Data{
+ data: &extractors.Data{
Site: "bcy",
Title: "bcy image test",
- Type: types.DataTypeImage,
+ Type: extractors.DataTypeImage,
URL: "https://www.bcyimg.com",
- Streams: map[string]*types.Stream{
+ Streams: map[string]*extractors.Stream{
"default": {
ID: "default",
- Parts: []*types.Part{
+ Parts: []*extractors.Part{
{
URL: "http://img5.bcyimg.com/coser/143767/post/c0j7x/0d713eb41a614053ac6a3b146914f6bc.jpg/w650",
Size: 56107,
diff --git a/downloader/utils.go b/downloader/utils.go
index 3f5b39f5a..f0634ff46 100644
--- a/downloader/utils.go
+++ b/downloader/utils.go
@@ -6,7 +6,7 @@ import (
"github.com/fatih/color"
- "github.com/iawia002/lux/extractors/types"
+ "github.com/iawia002/lux/extractors"
)
var (
@@ -14,8 +14,8 @@ var (
cyan = color.New(color.FgCyan)
)
-func genSortedStreams(streams map[string]*types.Stream) []*types.Stream {
- sortedStreams := make([]*types.Stream, 0, len(streams))
+func genSortedStreams(streams map[string]*extractors.Stream) []*extractors.Stream {
+ sortedStreams := make([]*extractors.Stream, 0, len(streams))
for _, data := range streams {
sortedStreams = append(sortedStreams, data)
}
@@ -27,7 +27,7 @@ func genSortedStreams(streams map[string]*types.Stream) []*types.Stream {
return sortedStreams
}
-func printHeader(data *types.Data) {
+func printHeader(data *extractors.Data) {
fmt.Println()
cyan.Printf(" Site: ") // nolint
fmt.Println(data.Site)
@@ -37,7 +37,7 @@ func printHeader(data *types.Data) {
fmt.Println(data.Type)
}
-func printStream(stream *types.Stream) {
+func printStream(stream *extractors.Stream) {
blue.Println(fmt.Sprintf(" [%s] -------------------", stream.ID)) // nolint
if stream.Quality != "" {
cyan.Printf(" Quality: ") // nolint
@@ -49,7 +49,7 @@ func printStream(stream *types.Stream) {
fmt.Printf("lux -f %s ...\n\n", stream.ID)
}
-func printInfo(data *types.Data, sortedStreams []*types.Stream) {
+func printInfo(data *extractors.Data, sortedStreams []*extractors.Stream) {
printHeader(data)
cyan.Printf(" Streams: ") // nolint
@@ -59,7 +59,7 @@ func printInfo(data *types.Data, sortedStreams []*types.Stream) {
}
}
-func printStreamInfo(data *types.Data, stream *types.Stream) {
+func printStreamInfo(data *extractors.Data, stream *extractors.Stream) {
printHeader(data)
cyan.Printf(" Stream: ") // nolint
diff --git a/extractors/acfun/acfun.go b/extractors/acfun/acfun.go
index 5a2a83ffb..a642ae89c 100644
--- a/extractors/acfun/acfun.go
+++ b/extractors/acfun/acfun.go
@@ -7,12 +7,16 @@ import (
jsoniter "github.com/json-iterator/go"
- "github.com/iawia002/lux/extractors/types"
+ "github.com/iawia002/lux/extractors"
"github.com/iawia002/lux/parser"
"github.com/iawia002/lux/request"
"github.com/iawia002/lux/utils"
)
+func init() {
+ extractors.Register("acfun", New())
+}
+
const (
bangumiDataPattern = "window.pageInfo = window.bangumiData = (.*);"
bangumiListPattern = "window.bangumiList = (.*);"
@@ -25,12 +29,12 @@ const (
type extractor struct{}
// New returns a new acfun bangumi extractor
-func New() types.Extractor {
+func New() extractors.Extractor {
return &extractor{}
}
// Extract ...
-func (e *extractor) Extract(URL string, option types.Options) ([]*types.Data, error) {
+func (e *extractor) Extract(URL string, option extractors.Options) ([]*extractors.Data, error) {
html, err := request.GetByte(URL, referer, nil)
if err != nil {
return nil, err
@@ -56,7 +60,7 @@ func (e *extractor) Extract(URL string, option types.Options) ([]*types.Data, er
epDatas = append(epDatas, &bgData.episodeData)
}
- datas := make([]*types.Data, 0)
+ datas := make([]*extractors.Data, 0)
wgp := utils.NewWaitGroupPool(option.ThreadNumber)
for _, epData := range epDatas {
@@ -75,48 +79,48 @@ func concatURL(epData *episodeData) string {
return fmt.Sprintf(bangumiHTMLURL, epData.BangumiID, epData.ItemID)
}
-func extractBangumi(URL string) *types.Data {
+func extractBangumi(URL string) *extractors.Data {
var err error
html, err := request.GetByte(URL, referer, nil)
if err != nil {
- return types.EmptyData(URL, err)
+ return extractors.EmptyData(URL, err)
}
_, vInfo, err := resolvingData(html)
if err != nil {
- return types.EmptyData(URL, err)
+ return extractors.EmptyData(URL, err)
}
- streams := make(map[string]*types.Stream)
+ streams := make(map[string]*extractors.Stream)
for _, stm := range vInfo.AdaptationSet[0].Streams {
m3u8URL, err := url.Parse(stm.URL)
if err != nil {
- return types.EmptyData(URL, err)
+ return extractors.EmptyData(URL, err)
}
urls, err := utils.M3u8URLs(m3u8URL.String())
if err != nil {
_, err = url.Parse(stm.URL)
if err != nil {
- return types.EmptyData(URL, err)
+ return extractors.EmptyData(URL, err)
}
urls, err = utils.M3u8URLs(stm.BackURL)
if err != nil {
- return types.EmptyData(URL, err)
+ return extractors.EmptyData(URL, err)
}
}
// There is no size information in the m3u8 file and the calculation will take too much time, just ignore it.
- parts := make([]*types.Part, 0)
+ parts := make([]*extractors.Part, 0)
for _, u := range urls {
- parts = append(parts, &types.Part{
+ parts = append(parts, &extractors.Part{
URL: u,
Ext: "ts",
})
}
- streams[stm.QualityLabel] = &types.Stream{
+ streams[stm.QualityLabel] = &extractors.Stream{
ID: stm.QualityType,
Parts: parts,
Quality: stm.QualityType,
@@ -126,12 +130,12 @@ func extractBangumi(URL string) *types.Data {
doc, err := parser.GetDoc(string(html))
if err != nil {
- return types.EmptyData(URL, err)
+ return extractors.EmptyData(URL, err)
}
- data := &types.Data{
+ data := &extractors.Data{
Site: "AcFun acfun.cn",
Title: parser.Title(doc),
- Type: types.DataTypeVideo,
+ Type: extractors.DataTypeVideo,
Streams: streams,
URL: URL,
}
diff --git a/extractors/acfun/acfun_test.go b/extractors/acfun/acfun_test.go
index b4f66aa7b..a7c28980e 100644
--- a/extractors/acfun/acfun_test.go
+++ b/extractors/acfun/acfun_test.go
@@ -3,7 +3,7 @@ package acfun
import (
"testing"
- "github.com/iawia002/lux/extractors/types"
+ "github.com/iawia002/lux/extractors"
"github.com/iawia002/lux/test"
)
@@ -22,7 +22,7 @@ func TestDownload(t *testing.T) {
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
- data, err := New().Extract(tt.args.URL, types.Options{})
+ data, err := New().Extract(tt.args.URL, extractors.Options{})
test.CheckError(t, err)
test.Check(t, tt.args, data[0])
})
diff --git a/extractors/bcy/bcy.go b/extractors/bcy/bcy.go
index 4f808597f..8f0546507 100644
--- a/extractors/bcy/bcy.go
+++ b/extractors/bcy/bcy.go
@@ -5,12 +5,16 @@ import (
"fmt"
"strings"
- "github.com/iawia002/lux/extractors/types"
+ "github.com/iawia002/lux/extractors"
"github.com/iawia002/lux/parser"
"github.com/iawia002/lux/request"
"github.com/iawia002/lux/utils"
)
+func init() {
+ extractors.Register("bcy", New())
+}
+
type bcyData struct {
Detail struct {
PostData struct {
@@ -24,12 +28,12 @@ type bcyData struct {
type extractor struct{}
// New returns a bcy extractor.
-func New() types.Extractor {
+func New() extractors.Extractor {
return &extractor{}
}
// Extract is the main function to extract the data.
-func (e *extractor) Extract(url string, option types.Options) ([]*types.Data, error) {
+func (e *extractor) Extract(url string, option extractors.Options) ([]*extractors.Data, error) {
html, err := request.Get(url, url, nil)
if err != nil {
return nil, err
@@ -39,7 +43,7 @@ func (e *extractor) Extract(url string, option types.Options) ([]*types.Data, er
rep := strings.NewReplacer(`\"`, `"`, `\\`, `\`)
realURLs := utils.MatchOneOf(html, `JSON.parse\("(.+?)"\);`)
if realURLs == nil || len(realURLs) < 2 {
- return nil, types.ErrURLParseFailed
+ return nil, extractors.ErrURLParseFailed
}
jsonString := rep.Replace(realURLs[1])
@@ -54,7 +58,7 @@ func (e *extractor) Extract(url string, option types.Options) ([]*types.Data, er
}
title := strings.Replace(parser.Title(doc), " - 半次元 banciyuan - ACG爱好者社区", "", -1)
- parts := make([]*types.Part, 0, len(data.Detail.PostData.Multi))
+ parts := make([]*extractors.Part, 0, len(data.Detail.PostData.Multi))
var totalSize int64
for _, img := range data.Detail.PostData.Multi {
size, err := request.Size(img.OriginalPath, url)
@@ -66,23 +70,23 @@ func (e *extractor) Extract(url string, option types.Options) ([]*types.Data, er
if err != nil {
return nil, err
}
- parts = append(parts, &types.Part{
+ parts = append(parts, &extractors.Part{
URL: img.OriginalPath,
Size: size,
Ext: ext,
})
}
- streams := map[string]*types.Stream{
+ streams := map[string]*extractors.Stream{
"default": {
Parts: parts,
Size: totalSize,
},
}
- return []*types.Data{
+ return []*extractors.Data{
{
Site: "半次元 bcy.net",
Title: title,
- Type: types.DataTypeImage,
+ Type: extractors.DataTypeImage,
Streams: streams,
URL: url,
},
diff --git a/extractors/bcy/bcy_test.go b/extractors/bcy/bcy_test.go
index e605450c3..18392f2fc 100644
--- a/extractors/bcy/bcy_test.go
+++ b/extractors/bcy/bcy_test.go
@@ -3,7 +3,7 @@ package bcy
import (
"testing"
- "github.com/iawia002/lux/extractors/types"
+ "github.com/iawia002/lux/extractors"
"github.com/iawia002/lux/test"
)
@@ -23,7 +23,7 @@ func TestDownload(t *testing.T) {
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
- data, err := New().Extract(tt.args.URL, types.Options{})
+ data, err := New().Extract(tt.args.URL, extractors.Options{})
test.CheckError(t, err)
test.Check(t, tt.args, data[0])
})
diff --git a/extractors/bilibili/bilibili.go b/extractors/bilibili/bilibili.go
index cd8439b18..3d6cc2df4 100644
--- a/extractors/bilibili/bilibili.go
+++ b/extractors/bilibili/bilibili.go
@@ -8,12 +8,16 @@ import (
"strings"
"time"
- "github.com/iawia002/lux/extractors/types"
+ "github.com/iawia002/lux/extractors"
"github.com/iawia002/lux/parser"
"github.com/iawia002/lux/request"
"github.com/iawia002/lux/utils"
)
+func init() {
+ extractors.Register("bilibili", New())
+}
+
const (
bilibiliAPI = "https://api.bilibili.com/x/player/playurl?"
bilibiliBangumiAPI = "https://api.bilibili.com/pgc/player/web/playurl?"
@@ -85,7 +89,7 @@ type bilibiliOptions struct {
subtitle string
}
-func extractBangumi(url, html string, extractOption types.Options) ([]*types.Data, error) {
+func extractBangumi(url, html string, extractOption extractors.Options) ([]*extractors.Data, error) {
dataString := utils.MatchOneOf(html, `window.__INITIAL_STATE__=(.+?);\(function`)[1]
var data bangumiData
err := json.Unmarshal([]byte(dataString), &data)
@@ -109,12 +113,12 @@ func extractBangumi(url, html string, extractOption types.Options) ([]*types.Dat
cid: cid,
bvid: bvid,
}
- return []*types.Data{bilibiliDownload(options, extractOption)}, nil
+ return []*extractors.Data{bilibiliDownload(options, extractOption)}, nil
}
// handle bangumi playlist
needDownloadItems := utils.NeedDownloadList(extractOption.Items, extractOption.ItemStart, extractOption.ItemEnd, len(data.EpList))
- extractedData := make([]*types.Data, len(needDownloadItems))
+ extractedData := make([]*extractors.Data, len(needDownloadItems))
wgp := utils.NewWaitGroupPool(extractOption.ThreadNumber)
dataIndex := 0
for index, u := range data.EpList {
@@ -134,7 +138,7 @@ func extractBangumi(url, html string, extractOption types.Options) ([]*types.Dat
cid: u.Cid,
bvid: u.BVid,
}
- go func(index int, options bilibiliOptions, extractedData []*types.Data) {
+ go func(index int, options bilibiliOptions, extractedData []*extractors.Data) {
defer wgp.Done()
extractedData[index] = bilibiliDownload(options, extractOption)
}(dataIndex, options, extractedData)
@@ -159,7 +163,7 @@ func getMultiPageData(html string) (*multiPage, error) {
return &data, nil
}
-func extractNormalVideo(url, html string, extractOption types.Options) ([]*types.Data, error) {
+func extractNormalVideo(url, html string, extractOption extractors.Options) ([]*extractors.Data, error) {
pageData, err := getMultiPageData(html)
if err != nil {
return nil, err
@@ -179,7 +183,7 @@ func extractNormalVideo(url, html string, extractOption types.Options) ([]*types
}
if len(pageData.VideoData.Pages) < p || p < 1 {
- return nil, types.ErrURLParseFailed
+ return nil, extractors.ErrURLParseFailed
}
page := pageData.VideoData.Pages[p-1]
@@ -197,13 +201,13 @@ func extractNormalVideo(url, html string, extractOption types.Options) ([]*types
} else {
options.subtitle = page.Part
}
- return []*types.Data{bilibiliDownload(options, extractOption)}, nil
+ return []*extractors.Data{bilibiliDownload(options, extractOption)}, nil
}
// handle normal video playlist
// https://www.bilibili.com/video/av20827366/?p=1
needDownloadItems := utils.NeedDownloadList(extractOption.Items, extractOption.ItemStart, extractOption.ItemEnd, len(pageData.VideoData.Pages))
- extractedData := make([]*types.Data, len(needDownloadItems))
+ extractedData := make([]*extractors.Data, len(needDownloadItems))
wgp := utils.NewWaitGroupPool(extractOption.ThreadNumber)
dataIndex := 0
for index, u := range pageData.VideoData.Pages {
@@ -220,7 +224,7 @@ func extractNormalVideo(url, html string, extractOption types.Options) ([]*types
subtitle: u.Part,
page: u.Page,
}
- go func(index int, options bilibiliOptions, extractedData []*types.Data) {
+ go func(index int, options bilibiliOptions, extractedData []*extractors.Data) {
defer wgp.Done()
extractedData[index] = bilibiliDownload(options, extractOption)
}(dataIndex, options, extractedData)
@@ -233,12 +237,12 @@ func extractNormalVideo(url, html string, extractOption types.Options) ([]*types
type extractor struct{}
// New returns a bilibili extractor.
-func New() types.Extractor {
+func New() extractors.Extractor {
return &extractor{}
}
// Extract is the main function to extract the data.
-func (e *extractor) Extract(url string, option types.Options) ([]*types.Data, error) {
+func (e *extractor) Extract(url string, option extractors.Options) ([]*extractors.Data, error) {
var err error
html, err := request.Get(url, referer, nil)
if err != nil {
@@ -257,7 +261,7 @@ func (e *extractor) Extract(url string, option types.Options) ([]*types.Data, er
}
// bilibiliDownload is the download function for a single URL
-func bilibiliDownload(options bilibiliOptions, extractOption types.Options) *types.Data {
+func bilibiliDownload(options bilibiliOptions, extractOption extractors.Options) *extractors.Data {
var (
err error
html string
@@ -268,7 +272,7 @@ func bilibiliDownload(options bilibiliOptions, extractOption types.Options) *typ
} else {
html, err = request.Get(options.url, referer, nil)
if err != nil {
- return types.EmptyData(options.url, err)
+ return extractors.EmptyData(options.url, err)
}
}
@@ -277,17 +281,17 @@ func bilibiliDownload(options bilibiliOptions, extractOption types.Options) *typ
// "accept_quality":[127,120,112,80,48,32,16],
api, err := genAPI(options.aid, options.cid, 127, options.bvid, options.bangumi, extractOption.Cookie)
if err != nil {
- return types.EmptyData(options.url, err)
+ return extractors.EmptyData(options.url, err)
}
jsonString, err := request.Get(api, referer, nil)
if err != nil {
- return types.EmptyData(options.url, err)
+ return extractors.EmptyData(options.url, err)
}
var data dash
err = json.Unmarshal([]byte(jsonString), &data)
if err != nil {
- return types.EmptyData(options.url, err)
+ return extractors.EmptyData(options.url, err)
}
var dashData dashInfo
if data.Data.Description == nil {
@@ -296,7 +300,7 @@ func bilibiliDownload(options bilibiliOptions, extractOption types.Options) *typ
dashData = data.Data
}
- var audioPart *types.Part
+ var audioPart *extractors.Part
if dashData.Streams.Audio != nil {
// Get audio part
var audioID int
@@ -311,23 +315,23 @@ func bilibiliDownload(options bilibiliOptions, extractOption types.Options) *typ
}
s, err := request.Size(audios[audioID], referer)
if err != nil {
- return types.EmptyData(options.url, err)
+ return extractors.EmptyData(options.url, err)
}
- audioPart = &types.Part{
+ audioPart = &extractors.Part{
URL: audios[audioID],
Size: s,
Ext: "m4a",
}
}
- streams := make(map[string]*types.Stream, len(dashData.Quality))
+ streams := make(map[string]*extractors.Stream, len(dashData.Quality))
for _, stream := range dashData.Streams.Video {
s, err := request.Size(stream.BaseURL, referer)
if err != nil {
- return types.EmptyData(options.url, err)
+ return extractors.EmptyData(options.url, err)
}
- parts := make([]*types.Part, 0, 2)
- parts = append(parts, &types.Part{
+ parts := make([]*extractors.Part, 0, 2)
+ parts = append(parts, &extractors.Part{
URL: stream.BaseURL,
Size: s,
Ext: getExtFromMimeType(stream.MimeType),
@@ -340,7 +344,7 @@ func bilibiliDownload(options bilibiliOptions, extractOption types.Options) *typ
size += part.Size
}
id := fmt.Sprintf("%d-%d", stream.ID, stream.Codecid)
- streams[id] = &types.Stream{
+ streams[id] = &extractors.Stream{
Parts: parts,
Size: size,
Quality: fmt.Sprintf("%s %s", qualityString[stream.ID], stream.Codecs),
@@ -353,7 +357,7 @@ func bilibiliDownload(options bilibiliOptions, extractOption types.Options) *typ
// get the title
doc, err := parser.GetDoc(html)
if err != nil {
- return types.EmptyData(options.url, err)
+ return extractors.EmptyData(options.url, err)
}
title := parser.Title(doc)
if options.subtitle != "" {
@@ -364,14 +368,14 @@ func bilibiliDownload(options bilibiliOptions, extractOption types.Options) *typ
}
}
- return &types.Data{
+ return &extractors.Data{
Site: "哔哩哔哩 bilibili.com",
Title: title,
- Type: types.DataTypeVideo,
+ Type: extractors.DataTypeVideo,
Streams: streams,
- Captions: map[string]*types.CaptionPart{
+ Captions: map[string]*extractors.CaptionPart{
"danmaku": {
- Part: types.Part{
+ Part: extractors.Part{
URL: fmt.Sprintf("https://comment.bilibili.com/%d.xml", options.cid),
Ext: "xml",
},
@@ -390,7 +394,7 @@ func getExtFromMimeType(mimeType string) string {
return "mp4"
}
-func getSubTitleCaptionPart(aid int, cid int) *types.CaptionPart {
+func getSubTitleCaptionPart(aid int, cid int) *extractors.CaptionPart {
jsonString, err := request.Get(
fmt.Sprintf("http://api.bilibili.com/x/web-interface/view?aid=%d&cid=%d", aid, cid), referer, nil,
)
@@ -402,8 +406,8 @@ func getSubTitleCaptionPart(aid int, cid int) *types.CaptionPart {
if err != nil || len(stu.Data.SubtitleInfo.SubtitleList) == 0 {
return nil
}
- return &types.CaptionPart{
- Part: types.Part{
+ return &extractors.CaptionPart{
+ Part: extractors.Part{
URL: stu.Data.SubtitleInfo.SubtitleList[0].SubtitleUrl,
Ext: "srt",
},
diff --git a/extractors/bilibili/bilibili_test.go b/extractors/bilibili/bilibili_test.go
index b285ef024..403dda3af 100644
--- a/extractors/bilibili/bilibili_test.go
+++ b/extractors/bilibili/bilibili_test.go
@@ -3,7 +3,7 @@ package bilibili
import (
"testing"
- "github.com/iawia002/lux/extractors/types"
+ "github.com/iawia002/lux/extractors"
"github.com/iawia002/lux/test"
)
@@ -71,18 +71,18 @@ func TestBilibili(t *testing.T) {
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
var (
- data []*types.Data
+ data []*extractors.Data
err error
)
if tt.playlist {
// for playlist, we don't check the data
- _, err = New().Extract(tt.args.URL, types.Options{
+ _, err = New().Extract(tt.args.URL, extractors.Options{
Playlist: true,
ThreadNumber: 9,
})
test.CheckError(t, err)
} else {
- data, err = New().Extract(tt.args.URL, types.Options{})
+ data, err = New().Extract(tt.args.URL, extractors.Options{})
test.CheckError(t, err)
test.Check(t, tt.args, data[0])
}
diff --git a/extractors/douyin/douyin.go b/extractors/douyin/douyin.go
index 6e547c644..6ade34bca 100644
--- a/extractors/douyin/douyin.go
+++ b/extractors/douyin/douyin.go
@@ -6,20 +6,26 @@ import (
"net/http"
"strings"
- "github.com/iawia002/lux/extractors/types"
+ "github.com/iawia002/lux/extractors"
"github.com/iawia002/lux/request"
"github.com/iawia002/lux/utils"
)
+func init() {
+ e := New()
+ extractors.Register("douyin", e)
+ extractors.Register("iesdouyin", e)
+}
+
type extractor struct{}
// New returns a douyin extractor.
-func New() types.Extractor {
+func New() extractors.Extractor {
return &extractor{}
}
// Extract is the main function to extract the data.
-func (e *extractor) Extract(url string, option types.Options) ([]*types.Data, error) {
+func (e *extractor) Extract(url string, option extractors.Options) ([]*extractors.Data, error) {
if strings.Contains(url, "v.douyin.com") {
req, err := http.NewRequest("GET", url, nil)
if err != nil {
@@ -42,7 +48,7 @@ func (e *extractor) Extract(url string, option types.Options) ([]*types.Data, er
return nil, errors.New("unable to get video ID")
}
if itemIds == nil || len(itemIds) < 2 {
- return nil, types.ErrURLParseFailed
+ return nil, extractors.ErrURLParseFailed
}
itemId := itemIds[len(itemIds)-1]
jsonData, err := request.Get("https://www.iesdouyin.com/web/api/v2/aweme/iteminfo/?item_ids="+itemId, url, nil)
@@ -54,12 +60,12 @@ func (e *extractor) Extract(url string, option types.Options) ([]*types.Data, er
return nil, err
}
- urlData := make([]*types.Part, 0)
- var douyinType types.DataType
+ urlData := make([]*extractors.Part, 0)
+ var douyinType extractors.DataType
var totalSize int64
// AwemeType: 2:image 4:video
if douyin.ItemList[0].AwemeType == 2 {
- douyinType = types.DataTypeImage
+ douyinType = extractors.DataTypeImage
for _, img := range douyin.ItemList[0].Images {
realURL := img.URLList[len(img.URLList)-1]
size, err := request.Size(realURL, url)
@@ -71,33 +77,33 @@ func (e *extractor) Extract(url string, option types.Options) ([]*types.Data, er
if err != nil {
return nil, err
}
- urlData = append(urlData, &types.Part{
+ urlData = append(urlData, &extractors.Part{
URL: realURL,
Size: size,
Ext: ext,
})
}
} else {
- douyinType = types.DataTypeVideo
+ douyinType = extractors.DataTypeVideo
realURL := "https://aweme.snssdk.com/aweme/v1/play/?video_id=" + douyin.ItemList[0].Video.PlayAddr.URI + "&ratio=720p&line=0"
totalSize, err = request.Size(realURL, url)
if err != nil {
return nil, err
}
- urlData = append(urlData, &types.Part{
+ urlData = append(urlData, &extractors.Part{
URL: realURL,
Size: totalSize,
Ext: "mp4",
})
}
- streams := map[string]*types.Stream{
+ streams := map[string]*extractors.Stream{
"default": {
Parts: urlData,
Size: totalSize,
},
}
- return []*types.Data{
+ return []*extractors.Data{
{
Site: "抖音 douyin.com",
Title: douyin.ItemList[0].Desc,
diff --git a/extractors/douyin/douyin_test.go b/extractors/douyin/douyin_test.go
index 7079db69f..2474b3609 100644
--- a/extractors/douyin/douyin_test.go
+++ b/extractors/douyin/douyin_test.go
@@ -3,7 +3,7 @@ package douyin
import (
"testing"
- "github.com/iawia002/lux/extractors/types"
+ "github.com/iawia002/lux/extractors"
"github.com/iawia002/lux/test"
)
@@ -29,7 +29,7 @@ func TestDownload(t *testing.T) {
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
- data, err := New().Extract(tt.args.URL, types.Options{})
+ data, err := New().Extract(tt.args.URL, extractors.Options{})
test.CheckError(t, err)
test.Check(t, tt.args, data[0])
})
diff --git a/extractors/douyu/douyu.go b/extractors/douyu/douyu.go
index 34e14e0c8..aac136ce7 100644
--- a/extractors/douyu/douyu.go
+++ b/extractors/douyu/douyu.go
@@ -4,11 +4,15 @@ import (
"encoding/json"
"errors"
- "github.com/iawia002/lux/extractors/types"
+ "github.com/iawia002/lux/extractors"
"github.com/iawia002/lux/request"
"github.com/iawia002/lux/utils"
)
+func init() {
+ extractors.Register("douyu", New())
+}
+
type douyuData struct {
Error int `json:"error"`
Data struct {
@@ -50,12 +54,12 @@ func douyuM3u8(url string) ([]douyuURLInfo, int64, error) {
type extractor struct{}
// New returns a douyu extractor.
-func New() types.Extractor {
+func New() extractors.Extractor {
return &extractor{}
}
// Extract is the main function to extract the data.
-func (e *extractor) Extract(url string, option types.Options) ([]*types.Data, error) {
+func (e *extractor) Extract(url string, option extractors.Options) ([]*extractors.Data, error) {
var err error
liveVid := utils.MatchOneOf(url, `https?://www.douyu.com/(\S+)`)
if liveVid != nil {
@@ -68,13 +72,13 @@ func (e *extractor) Extract(url string, option types.Options) ([]*types.Data, er
}
 titles := utils.MatchOneOf(html, `<h1>(.*?)</h1>`)
if titles == nil || len(titles) < 2 {
- return nil, types.ErrURLParseFailed
+ return nil, extractors.ErrURLParseFailed
}
title := titles[1]
vids := utils.MatchOneOf(url, `https?://v.douyu.com/show/(\S+)`)
if vids == nil || len(vids) < 2 {
- return nil, types.ErrURLParseFailed
+ return nil, extractors.ErrURLParseFailed
}
vid := vids[1]
@@ -91,26 +95,26 @@ func (e *extractor) Extract(url string, option types.Options) ([]*types.Data, er
if err != nil {
return nil, err
}
- urls := make([]*types.Part, len(m3u8URLs))
+ urls := make([]*extractors.Part, len(m3u8URLs))
for index, u := range m3u8URLs {
- urls[index] = &types.Part{
+ urls[index] = &extractors.Part{
URL: u.URL,
Size: u.Size,
Ext: "ts",
}
}
- streams := map[string]*types.Stream{
+ streams := map[string]*extractors.Stream{
"default": {
Parts: urls,
Size: totalSize,
},
}
- return []*types.Data{
+ return []*extractors.Data{
{
Site: "斗鱼 douyu.com",
Title: title,
- Type: types.DataTypeVideo,
+ Type: extractors.DataTypeVideo,
Streams: streams,
URL: url,
},
diff --git a/extractors/douyu/douyu_test.go b/extractors/douyu/douyu_test.go
index fc08084dd..b0df39c99 100644
--- a/extractors/douyu/douyu_test.go
+++ b/extractors/douyu/douyu_test.go
@@ -3,7 +3,7 @@ package douyu
import (
"testing"
- "github.com/iawia002/lux/extractors/types"
+ "github.com/iawia002/lux/extractors"
"github.com/iawia002/lux/test"
)
@@ -23,7 +23,7 @@ func TestDownload(t *testing.T) {
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
- New().Extract(tt.args.URL, types.Options{})
+ New().Extract(tt.args.URL, extractors.Options{})
})
}
}
diff --git a/extractors/eporner/eporner.go b/extractors/eporner/eporner.go
index d1ff23d97..5ac45ef4c 100644
--- a/extractors/eporner/eporner.go
+++ b/extractors/eporner/eporner.go
@@ -7,12 +7,16 @@ import (
"github.com/PuerkitoBio/goquery"
- "github.com/iawia002/lux/extractors/types"
+ "github.com/iawia002/lux/extractors"
"github.com/iawia002/lux/parser"
"github.com/iawia002/lux/request"
"github.com/iawia002/lux/utils"
)
+func init() {
+ extractors.Register("eporner", New())
+}
+
const (
downloadclass = ".dloaddivcol"
)
@@ -94,12 +98,12 @@ func getSrc(html string) []*src {
type extractor struct{}
// New returns a eporner extractor.
-func New() types.Extractor {
+func New() extractors.Extractor {
return &extractor{}
}
// Extract is the main function to extract the data.
-func (e *extractor) Extract(u string, option types.Options) ([]*types.Data, error) {
+func (e *extractor) Extract(u string, option extractors.Options) ([]*extractors.Data, error) {
html, err := request.Get(u, u, nil)
if err != nil {
return nil, err
@@ -116,7 +120,7 @@ func (e *extractor) Extract(u string, option types.Options) ([]*types.Data, erro
return nil, err
}
srcs := getSrc(html)
- streams := make(map[string]*types.Stream, len(srcs))
+ streams := make(map[string]*extractors.Stream, len(srcs))
for _, src := range srcs {
srcurl := uu.Scheme + "://" + uu.Host + src.url
// skipping an extra HEAD request to the URL.
@@ -124,22 +128,22 @@ func (e *extractor) Extract(u string, option types.Options) ([]*types.Data, erro
if err != nil {
return nil, err
}
- urlData := &types.Part{
+ urlData := &extractors.Part{
URL: srcurl,
Size: src.size,
Ext: "mp4",
}
- streams[src.quality] = &types.Stream{
- Parts: []*types.Part{urlData},
+ streams[src.quality] = &extractors.Stream{
+ Parts: []*extractors.Part{urlData},
Size: src.size,
Quality: src.quality,
}
}
- return []*types.Data{
+ return []*extractors.Data{
{
Site: "EPORNER eporner.com",
Title: title,
- Type: types.DataTypeVideo,
+ Type: extractors.DataTypeVideo,
Streams: streams,
URL: u,
},
diff --git a/extractors/eporner/eporner_test.go b/extractors/eporner/eporner_test.go
index 77f12edb4..4f6422110 100644
--- a/extractors/eporner/eporner_test.go
+++ b/extractors/eporner/eporner_test.go
@@ -3,7 +3,7 @@ package eporner
import (
"testing"
- "github.com/iawia002/lux/extractors/types"
+ "github.com/iawia002/lux/extractors"
"github.com/iawia002/lux/test"
)
@@ -24,7 +24,7 @@ func TestDownload(t *testing.T) {
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
- data, err := New().Extract(tt.args.URL, types.Options{})
+ data, err := New().Extract(tt.args.URL, extractors.Options{})
test.CheckError(t, err)
test.Check(t, tt.args, data[0])
})
diff --git a/extractors/types/defs.go b/extractors/errors.go
similarity index 92%
rename from extractors/types/defs.go
rename to extractors/errors.go
index eea34512a..cf22073a1 100644
--- a/extractors/types/defs.go
+++ b/extractors/errors.go
@@ -1,4 +1,4 @@
-package types
+package extractors
import (
"errors"
diff --git a/extractors/extractors.go b/extractors/extractors.go
index 7873ac11d..37a9adc77 100644
--- a/extractors/extractors.go
+++ b/extractors/extractors.go
@@ -3,95 +3,23 @@ package extractors
import (
"net/url"
"strings"
+ "sync"
- "github.com/iawia002/lux/extractors/acfun"
- "github.com/iawia002/lux/extractors/bcy"
- "github.com/iawia002/lux/extractors/bilibili"
- "github.com/iawia002/lux/extractors/douyin"
- "github.com/iawia002/lux/extractors/douyu"
- "github.com/iawia002/lux/extractors/eporner"
- "github.com/iawia002/lux/extractors/facebook"
- "github.com/iawia002/lux/extractors/geekbang"
- "github.com/iawia002/lux/extractors/haokan"
- "github.com/iawia002/lux/extractors/hupu"
- "github.com/iawia002/lux/extractors/huya"
- "github.com/iawia002/lux/extractors/instagram"
- "github.com/iawia002/lux/extractors/iqiyi"
- "github.com/iawia002/lux/extractors/mgtv"
- "github.com/iawia002/lux/extractors/miaopai"
- "github.com/iawia002/lux/extractors/netease"
- "github.com/iawia002/lux/extractors/pixivision"
- "github.com/iawia002/lux/extractors/pornhub"
- "github.com/iawia002/lux/extractors/qq"
- "github.com/iawia002/lux/extractors/streamtape"
- "github.com/iawia002/lux/extractors/tangdou"
- "github.com/iawia002/lux/extractors/tiktok"
- "github.com/iawia002/lux/extractors/tumblr"
- "github.com/iawia002/lux/extractors/twitter"
- "github.com/iawia002/lux/extractors/types"
- "github.com/iawia002/lux/extractors/udn"
- "github.com/iawia002/lux/extractors/universal"
- "github.com/iawia002/lux/extractors/vimeo"
- "github.com/iawia002/lux/extractors/weibo"
- "github.com/iawia002/lux/extractors/ximalaya"
- "github.com/iawia002/lux/extractors/xvideos"
- "github.com/iawia002/lux/extractors/yinyuetai"
- "github.com/iawia002/lux/extractors/youku"
- "github.com/iawia002/lux/extractors/youtube"
"github.com/iawia002/lux/utils"
)
-var extractorMap map[string]types.Extractor
+var lock sync.RWMutex
+var extractorMap = make(map[string]Extractor)
-func init() {
- douyinExtractor := douyin.New()
- youtubeExtractor := youtube.New()
- stExtractor := streamtape.New()
-
- extractorMap = map[string]types.Extractor{
- "": universal.New(), // universal extractor
-
- "douyin": douyinExtractor,
- "iesdouyin": douyinExtractor,
- "bilibili": bilibili.New(),
- "bcy": bcy.New(),
- "pixivision": pixivision.New(),
- "youku": youku.New(),
- "youtube": youtubeExtractor,
- "youtu": youtubeExtractor, // youtu.be
- "iqiyi": iqiyi.New(iqiyi.SiteTypeIqiyi),
- "iq": iqiyi.New(iqiyi.SiteTypeIQ),
- "mgtv": mgtv.New(),
- "tangdou": tangdou.New(),
- "tumblr": tumblr.New(),
- "vimeo": vimeo.New(),
- "facebook": facebook.New(),
- "douyu": douyu.New(),
- "miaopai": miaopai.New(),
- "163": netease.New(),
- "weibo": weibo.New(),
- "ximalaya": ximalaya.New(),
- "instagram": instagram.New(),
- "twitter": twitter.New(),
- "qq": qq.New(),
- "yinyuetai": yinyuetai.New(),
- "geekbang": geekbang.New(),
- "pornhub": pornhub.New(),
- "xvideos": xvideos.New(),
- "udn": udn.New(),
- "tiktok": tiktok.New(),
- "haokan": haokan.New(),
- "acfun": acfun.New(),
- "eporner": eporner.New(),
- "streamtape": stExtractor,
- "streamta": stExtractor, // streamta.pe
- "hupu": hupu.New(),
- "huya": huya.New(),
- }
+// Register registers an Extractor.
+func Register(domain string, e Extractor) {
+ lock.Lock()
+ extractorMap[domain] = e
+ lock.Unlock()
}
// Extract is the main function to extract the data.
-func Extract(u string, option types.Options) ([]*types.Data, error) {
+func Extract(u string, option Options) ([]*Data, error) {
u = strings.TrimSpace(u)
var domain string
diff --git a/extractors/facebook/facebook.go b/extractors/facebook/facebook.go
index 7005596eb..5b0a3bec2 100644
--- a/extractors/facebook/facebook.go
+++ b/extractors/facebook/facebook.go
@@ -3,20 +3,24 @@ package facebook
import (
"fmt"
- "github.com/iawia002/lux/extractors/types"
+ "github.com/iawia002/lux/extractors"
"github.com/iawia002/lux/request"
"github.com/iawia002/lux/utils"
)
+func init() {
+ extractors.Register("facebook", New())
+}
+
type extractor struct{}
// New returns a facebook extractor.
-func New() types.Extractor {
+func New() extractors.Extractor {
return &extractor{}
}
// Extract is the main function to extract the data.
-func (e *extractor) Extract(url string, option types.Options) ([]*types.Data, error) {
+func (e *extractor) Extract(url string, option extractors.Options) ([]*extractors.Data, error) {
var err error
html, err := request.Get(url, url, nil)
if err != nil {
@@ -24,11 +28,11 @@ func (e *extractor) Extract(url string, option types.Options) ([]*types.Data, er
}
 titles := utils.MatchOneOf(html, `<title id="pageTitle">(.+)</title>`)
if titles == nil || len(titles) < 2 {
- return nil, types.ErrURLParseFailed
+ return nil, extractors.ErrURLParseFailed
}
title := titles[1]
- streams := make(map[string]*types.Stream, 2)
+ streams := make(map[string]*extractors.Stream, 2)
for _, quality := range []string{"sd", "hd"} {
srcElement := utils.MatchOneOf(
html, fmt.Sprintf(`%s_src_no_ratelimit:"(.+?)"`, quality),
@@ -42,23 +46,23 @@ func (e *extractor) Extract(url string, option types.Options) ([]*types.Data, er
if err != nil {
return nil, err
}
- urlData := &types.Part{
+ urlData := &extractors.Part{
URL: u,
Size: size,
Ext: "mp4",
}
- streams[quality] = &types.Stream{
- Parts: []*types.Part{urlData},
+ streams[quality] = &extractors.Stream{
+ Parts: []*extractors.Part{urlData},
Size: size,
Quality: quality,
}
}
- return []*types.Data{
+ return []*extractors.Data{
{
Site: "Facebook facebook.com",
Title: title,
- Type: types.DataTypeVideo,
+ Type: extractors.DataTypeVideo,
Streams: streams,
URL: url,
},
diff --git a/extractors/facebook/facebook_test.go b/extractors/facebook/facebook_test.go
index 19dd6482e..835537783 100644
--- a/extractors/facebook/facebook_test.go
+++ b/extractors/facebook/facebook_test.go
@@ -3,7 +3,7 @@ package facebook
import (
"testing"
- "github.com/iawia002/lux/extractors/types"
+ "github.com/iawia002/lux/extractors"
"github.com/iawia002/lux/test"
)
@@ -24,7 +24,7 @@ func TestDownload(t *testing.T) {
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
- data, err := New().Extract(tt.args.URL, types.Options{})
+ data, err := New().Extract(tt.args.URL, extractors.Options{})
test.CheckError(t, err)
test.Check(t, tt.args, data[0])
})
diff --git a/extractors/geekbang/geekbang.go b/extractors/geekbang/geekbang.go
index caa5af18d..a7eaea739 100644
--- a/extractors/geekbang/geekbang.go
+++ b/extractors/geekbang/geekbang.go
@@ -7,11 +7,15 @@ import (
"net/http"
"strings"
- "github.com/iawia002/lux/extractors/types"
+ "github.com/iawia002/lux/extractors"
"github.com/iawia002/lux/request"
"github.com/iawia002/lux/utils"
)
+func init() {
+ extractors.Register("geekbang", New())
+}
+
type geekData struct {
Code int `json:"code"`
Error json.RawMessage `json:"error"`
@@ -74,16 +78,16 @@ func geekM3u8(url string) ([]geekURLInfo, error) {
type extractor struct{}
// New returns a geekbang extractor.
-func New() types.Extractor {
+func New() extractors.Extractor {
return &extractor{}
}
// Extract is the main function to extract the data.
-func (e *extractor) Extract(url string, _ types.Options) ([]*types.Data, error) {
+func (e *extractor) Extract(url string, _ extractors.Options) ([]*extractors.Data, error) {
var err error
matches := utils.MatchOneOf(url, `https?://time.geekbang.org/course/detail/(\d+)-(\d+)`)
if matches == nil || len(matches) < 3 {
- return nil, types.ErrURLParseFailed
+ return nil, extractors.ErrURLParseFailed
}
// Get video information
@@ -140,7 +144,7 @@ func (e *extractor) Extract(url string, _ types.Options) ([]*types.Data, error)
title := data.Data.Title
- streams := make(map[string]*types.Stream, len(playInfo.PlayInfoList.PlayInfo))
+ streams := make(map[string]*extractors.Stream, len(playInfo.PlayInfoList.PlayInfo))
for _, media := range playInfo.PlayInfoList.PlayInfo {
m3u8URLs, err := geekM3u8(media.URL)
@@ -149,26 +153,26 @@ func (e *extractor) Extract(url string, _ types.Options) ([]*types.Data, error)
return nil, err
}
- urls := make([]*types.Part, len(m3u8URLs))
+ urls := make([]*extractors.Part, len(m3u8URLs))
for index, u := range m3u8URLs {
- urls[index] = &types.Part{
+ urls[index] = &extractors.Part{
URL: u.URL,
Size: u.Size,
Ext: "ts",
}
}
- streams[media.Definition] = &types.Stream{
+ streams[media.Definition] = &extractors.Stream{
Parts: urls,
Size: media.Size,
}
}
- return []*types.Data{
+ return []*extractors.Data{
{
Site: "极客时间 geekbang.org",
Title: title,
- Type: types.DataTypeVideo,
+ Type: extractors.DataTypeVideo,
Streams: streams,
URL: url,
},
diff --git a/extractors/geekbang/geekbang_test.go b/extractors/geekbang/geekbang_test.go
index 729e1153d..ee48d9e89 100644
--- a/extractors/geekbang/geekbang_test.go
+++ b/extractors/geekbang/geekbang_test.go
@@ -3,7 +3,7 @@ package geekbang
import (
"testing"
- "github.com/iawia002/lux/extractors/types"
+ "github.com/iawia002/lux/extractors"
"github.com/iawia002/lux/test"
)
@@ -23,7 +23,7 @@ func TestDownload(t *testing.T) {
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
- data, err := New().Extract(tt.args.URL, types.Options{})
+ data, err := New().Extract(tt.args.URL, extractors.Options{})
test.CheckError(t, err)
test.Check(t, tt.args, data[0])
})
diff --git a/extractors/haokan/haokan.go b/extractors/haokan/haokan.go
index f250ca5b3..7f57ec43c 100644
--- a/extractors/haokan/haokan.go
+++ b/extractors/haokan/haokan.go
@@ -3,20 +3,24 @@ package haokan
import (
"strings"
- "github.com/iawia002/lux/extractors/types"
+ "github.com/iawia002/lux/extractors"
"github.com/iawia002/lux/request"
"github.com/iawia002/lux/utils"
)
+func init() {
+ extractors.Register("haokan", New())
+}
+
type extractor struct{}
// New returns a haokan extractor.
-func New() types.Extractor {
+func New() extractors.Extractor {
return &extractor{}
}
// Extract is the main function to extract the data.
-func (e *extractor) Extract(url string, option types.Options) ([]*types.Data, error) {
+func (e *extractor) Extract(url string, option extractors.Options) ([]*extractors.Data, error) {
html, err := request.Get(url, url, nil)
if err != nil {
return nil, err
@@ -24,7 +28,7 @@ func (e *extractor) Extract(url string, option types.Options) ([]*types.Data, er
titles := utils.MatchOneOf(html, `property="og:title"\s+content="(.+?)"`)
if titles == nil || len(titles) < 2 {
- return nil, types.ErrURLParseFailed
+ return nil, extractors.ErrURLParseFailed
}
title := titles[1]
@@ -37,7 +41,7 @@ func (e *extractor) Extract(url string, option types.Options) ([]*types.Data, er
}
if urls == nil || len(urls) < 2 {
- return nil, types.ErrURLParseFailed
+ return nil, extractors.ErrURLParseFailed
}
playurl := strings.Replace(urls[1], `\/`, `/`, -1)
@@ -52,9 +56,9 @@ func (e *extractor) Extract(url string, option types.Options) ([]*types.Data, er
return nil, err
}
- streams := map[string]*types.Stream{
+ streams := map[string]*extractors.Stream{
"default": {
- Parts: []*types.Part{
+ Parts: []*extractors.Part{
{
URL: playurl,
Size: size,
@@ -65,11 +69,11 @@ func (e *extractor) Extract(url string, option types.Options) ([]*types.Data, er
},
}
- return []*types.Data{
+ return []*extractors.Data{
{
Site: "好看视频 haokan.baidu.com",
Title: title,
- Type: types.DataTypeVideo,
+ Type: extractors.DataTypeVideo,
Streams: streams,
URL: url,
},
diff --git a/extractors/haokan/haokan_test.go b/extractors/haokan/haokan_test.go
index c6c40b158..fe6abdd24 100644
--- a/extractors/haokan/haokan_test.go
+++ b/extractors/haokan/haokan_test.go
@@ -3,7 +3,7 @@ package haokan
import (
"testing"
- "github.com/iawia002/lux/extractors/types"
+ "github.com/iawia002/lux/extractors"
"github.com/iawia002/lux/test"
)
@@ -23,7 +23,7 @@ func TestDownload(t *testing.T) {
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
- data, err := New().Extract(tt.args.URL, types.Options{})
+ data, err := New().Extract(tt.args.URL, extractors.Options{})
test.CheckError(t, err)
test.Check(t, tt.args, data[0])
})
diff --git a/extractors/hupu/hupu.go b/extractors/hupu/hupu.go
index ce3a8ae74..d2e2889d4 100644
--- a/extractors/hupu/hupu.go
+++ b/extractors/hupu/hupu.go
@@ -1,19 +1,23 @@
package hupu
import (
- "github.com/iawia002/lux/extractors/types"
+ "github.com/iawia002/lux/extractors"
"github.com/iawia002/lux/request"
"github.com/iawia002/lux/utils"
)
+func init() {
+ extractors.Register("hupu", New())
+}
+
type extractor struct{}
// New returns a hupu extractor.
-func New() types.Extractor {
+func New() extractors.Extractor {
return &extractor{}
}
-func (e *extractor) Extract(url string, option types.Options) ([]*types.Data, error) {
+func (e *extractor) Extract(url string, option extractors.Options) ([]*extractors.Data, error) {
html, err := request.Get(url, url, nil)
if err != nil {
return nil, err
@@ -32,31 +36,31 @@ func (e *extractor) Extract(url string, option types.Options) ([]*types.Data, er
if len(urlDesc) > 1 {
videoUrl = urlDesc[1]
} else {
- return nil, types.ErrURLParseFailed
+ return nil, extractors.ErrURLParseFailed
}
size, err := request.Size(videoUrl, url)
if err != nil {
return nil, err
}
- urlData := &types.Part{
+ urlData := &extractors.Part{
URL: videoUrl,
Size: size,
Ext: "mp4",
}
quality := "normal"
- streams := map[string]*types.Stream{
+ streams := map[string]*extractors.Stream{
quality: {
- Parts: []*types.Part{urlData},
+ Parts: []*extractors.Part{urlData},
Size: size,
Quality: quality,
},
}
- return []*types.Data{
+ return []*extractors.Data{
{
Site: "虎扑 hupu.com",
Title: title,
- Type: types.DataTypeVideo,
+ Type: extractors.DataTypeVideo,
Streams: streams,
URL: url,
},
diff --git a/extractors/hupu/hupu_test.go b/extractors/hupu/hupu_test.go
index b494c10de..42fe8dc8a 100644
--- a/extractors/hupu/hupu_test.go
+++ b/extractors/hupu/hupu_test.go
@@ -3,7 +3,7 @@ package hupu
import (
"testing"
- "github.com/iawia002/lux/extractors/types"
+ "github.com/iawia002/lux/extractors"
"github.com/iawia002/lux/test"
)
@@ -22,7 +22,7 @@ func TestHupu(t *testing.T) {
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
- New().Extract(tt.args.URL, types.Options{})
+ New().Extract(tt.args.URL, extractors.Options{})
})
}
}
diff --git a/extractors/huya/huya.go b/extractors/huya/huya.go
index 9d33c589a..ec03e7efc 100644
--- a/extractors/huya/huya.go
+++ b/extractors/huya/huya.go
@@ -1,21 +1,25 @@
package huya
import (
- "github.com/iawia002/lux/extractors/types"
+ "github.com/iawia002/lux/extractors"
"github.com/iawia002/lux/request"
"github.com/iawia002/lux/utils"
)
+func init() {
+ extractors.Register("huya", New())
+}
+
type extractor struct{}
const huyaVideoHost = "https://videotx-platform.cdn.huya.com/"
// New returns a huya extractor.
-func New() types.Extractor {
+func New() extractors.Extractor {
return &extractor{}
}
-func (e *extractor) Extract(url string, option types.Options) ([]*types.Data, error) {
+func (e *extractor) Extract(url string, option extractors.Options) ([]*extractors.Data, error) {
html, err := request.Get(url, url, nil)
if err != nil {
return nil, err
@@ -34,31 +38,31 @@ func (e *extractor) Extract(url string, option types.Options) ([]*types.Data, er
if len(videoDesc) > 1 {
videoUrl = huyaVideoHost + videoDesc[1]
} else {
- return nil, types.ErrURLParseFailed
+ return nil, extractors.ErrURLParseFailed
}
size, err := request.Size(videoUrl, url)
if err != nil {
return nil, err
}
- urlData := &types.Part{
+ urlData := &extractors.Part{
URL: videoUrl,
Size: size,
Ext: "mp4",
}
quality := "normal"
- streams := map[string]*types.Stream{
+ streams := map[string]*extractors.Stream{
quality: {
- Parts: []*types.Part{urlData},
+ Parts: []*extractors.Part{urlData},
Size: size,
Quality: quality,
},
}
- return []*types.Data{
+ return []*extractors.Data{
{
Site: "虎牙 huya.com",
Title: title,
- Type: types.DataTypeVideo,
+ Type: extractors.DataTypeVideo,
Streams: streams,
URL: url,
},
diff --git a/extractors/huya/huya_test.go b/extractors/huya/huya_test.go
index 39034bd71..592e998d8 100644
--- a/extractors/huya/huya_test.go
+++ b/extractors/huya/huya_test.go
@@ -3,7 +3,7 @@ package huya
import (
"testing"
- "github.com/iawia002/lux/extractors/types"
+ "github.com/iawia002/lux/extractors"
"github.com/iawia002/lux/test"
)
@@ -22,7 +22,7 @@ func TestHuya(t *testing.T) {
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
- New().Extract(tt.args.URL, types.Options{})
+ New().Extract(tt.args.URL, extractors.Options{})
})
}
}
diff --git a/extractors/instagram/instagram.go b/extractors/instagram/instagram.go
index 9172bb909..e7c1980af 100644
--- a/extractors/instagram/instagram.go
+++ b/extractors/instagram/instagram.go
@@ -6,12 +6,16 @@ import (
"path"
"strings"
- "github.com/iawia002/lux/extractors/types"
+ "github.com/iawia002/lux/extractors"
"github.com/iawia002/lux/parser"
"github.com/iawia002/lux/request"
"github.com/iawia002/lux/utils"
)
+func init() {
+ extractors.Register("instagram", New())
+}
+
type instagram struct {
ShortcodeMedia struct {
EdgeSidecar struct {
@@ -29,24 +33,24 @@ type instagram struct {
type extractor struct{}
// New returns a instagram extractor.
-func New() types.Extractor {
+func New() extractors.Extractor {
return &extractor{}
}
-func extractImageFromPage(html, url string) (map[string]*types.Stream, error) {
+func extractImageFromPage(html, url string) (map[string]*extractors.Stream, error) {
_, realURLs, err := parser.GetImages(html, "EmbeddedMediaImage", nil)
if err != nil {
return nil, err
}
- urls := make([]*types.Part, 0, len(realURLs))
+ urls := make([]*extractors.Part, 0, len(realURLs))
var totalSize int64
for _, realURL := range realURLs {
size, err := request.Size(realURL, url)
if err != nil {
return nil, err
}
- urlData := &types.Part{
+ urlData := &extractors.Part{
URL: realURL,
Size: size,
Ext: "jpg",
@@ -55,7 +59,7 @@ func extractImageFromPage(html, url string) (map[string]*types.Stream, error) {
totalSize += size
}
- return map[string]*types.Stream{
+ return map[string]*extractors.Stream{
"default": {
Parts: urls,
Size: totalSize,
@@ -63,13 +67,13 @@ func extractImageFromPage(html, url string) (map[string]*types.Stream, error) {
}, nil
}
-func extractFromData(dataString, url string) (map[string]*types.Stream, error) {
+func extractFromData(dataString, url string) (map[string]*extractors.Stream, error) {
var data instagram
if err := json.Unmarshal([]byte(dataString), &data); err != nil {
return nil, err
}
- urls := make([]*types.Part, 0, len(data.ShortcodeMedia.EdgeSidecar.Edges))
+ urls := make([]*extractors.Part, 0, len(data.ShortcodeMedia.EdgeSidecar.Edges))
var totalSize int64
for _, u := range data.ShortcodeMedia.EdgeSidecar.Edges {
// Image
@@ -85,7 +89,7 @@ func extractFromData(dataString, url string) (map[string]*types.Stream, error) {
if err != nil {
return nil, err
}
- urlData := &types.Part{
+ urlData := &extractors.Part{
URL: realURL,
Size: size,
Ext: ext,
@@ -94,7 +98,7 @@ func extractFromData(dataString, url string) (map[string]*types.Stream, error) {
totalSize += size
}
- return map[string]*types.Stream{
+ return map[string]*extractors.Stream{
"default": {
Parts: urls,
Size: totalSize,
@@ -103,7 +107,7 @@ func extractFromData(dataString, url string) (map[string]*types.Stream, error) {
}
// Extract is the main function to extract the data.
-func (e *extractor) Extract(url string, option types.Options) ([]*types.Data, error) {
+func (e *extractor) Extract(url string, option extractors.Options) ([]*extractors.Data, error) {
// Instagram is forcing a login to access the page, so we use the embed page to bypass that.
u, err := netURL.Parse(url)
if err != nil {
@@ -118,11 +122,11 @@ func (e *extractor) Extract(url string, option types.Options) ([]*types.Data, er
}
dataStrings := utils.MatchOneOf(html, `window\.__additionalDataLoaded\('graphql',(.*)\);`)
if dataStrings == nil || len(dataStrings) < 2 {
- return nil, types.ErrURLParseFailed
+ return nil, extractors.ErrURLParseFailed
}
dataString := dataStrings[1]
- var streams map[string]*types.Stream
+ var streams map[string]*extractors.Stream
if dataString == "" || dataString == "null" {
streams, err = extractImageFromPage(html, url)
} else {
@@ -132,11 +136,11 @@ func (e *extractor) Extract(url string, option types.Options) ([]*types.Data, er
return nil, err
}
- return []*types.Data{
+ return []*extractors.Data{
{
Site: "Instagram instagram.com",
Title: "Instagram " + id,
- Type: types.DataTypeImage,
+ Type: extractors.DataTypeImage,
Streams: streams,
URL: url,
},
diff --git a/extractors/instagram/instagram_test.go b/extractors/instagram/instagram_test.go
index a068c3a00..0bd4b9bde 100644
--- a/extractors/instagram/instagram_test.go
+++ b/extractors/instagram/instagram_test.go
@@ -3,7 +3,7 @@ package instagram
import (
"testing"
- "github.com/iawia002/lux/extractors/types"
+ "github.com/iawia002/lux/extractors"
"github.com/iawia002/lux/test"
)
@@ -39,7 +39,7 @@ func TestDownload(t *testing.T) {
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
- data, err := New().Extract(tt.args.URL, types.Options{})
+ data, err := New().Extract(tt.args.URL, extractors.Options{})
test.CheckError(t, err)
test.Check(t, tt.args, data[0])
})
diff --git a/extractors/iqiyi/iqiyi.go b/extractors/iqiyi/iqiyi.go
index a283ba915..ac2d7ede7 100644
--- a/extractors/iqiyi/iqiyi.go
+++ b/extractors/iqiyi/iqiyi.go
@@ -8,12 +8,17 @@ import (
"strings"
"time"
- "github.com/iawia002/lux/extractors/types"
+ "github.com/iawia002/lux/extractors"
"github.com/iawia002/lux/parser"
"github.com/iawia002/lux/request"
"github.com/iawia002/lux/utils"
)
+func init() {
+ extractors.Register("iqiyi", New(SiteTypeIqiyi))
+ extractors.Register("iq", New(SiteTypeIQ))
+}
+
type iqiyi struct {
Code string `json:"code"`
Data struct {
@@ -108,14 +113,14 @@ type extractor struct {
}
// New returns a iqiyi extractor.
-func New(siteType SiteType) types.Extractor {
+func New(siteType SiteType) extractors.Extractor {
return &extractor{
siteType: siteType,
}
}
// Extract is the main function to extract the data.
-func (e *extractor) Extract(url string, _ types.Options) ([]*types.Data, error) {
+func (e *extractor) Extract(url string, _ extractors.Options) ([]*extractors.Data, error) {
refer := iqiyiReferer
headers := make(map[string]string)
if e.siteType == SiteTypeIQ {
@@ -143,7 +148,7 @@ func (e *extractor) Extract(url string, _ types.Options) ([]*types.Data, error)
)
}
if tvid == nil || len(tvid) < 2 {
- return nil, types.ErrURLParseFailed
+ return nil, extractors.ErrURLParseFailed
}
vid := utils.MatchOneOf(
@@ -160,7 +165,7 @@ func (e *extractor) Extract(url string, _ types.Options) ([]*types.Data, error)
)
}
if vid == nil || len(vid) < 2 {
- return nil, types.ErrURLParseFailed
+ return nil, extractors.ErrURLParseFailed
}
doc, err := parser.GetDoc(html)
@@ -196,10 +201,10 @@ func (e *extractor) Extract(url string, _ types.Options) ([]*types.Data, error)
return nil, fmt.Errorf("can't play this video: %s", videoDatas.Msg)
}
- streams := make(map[string]*types.Stream)
+ streams := make(map[string]*extractors.Stream)
urlPrefix := videoDatas.Data.VP.Du
for _, video := range videoDatas.Data.VP.Tkl[0].Vs {
- urls := make([]*types.Part, len(video.Fs))
+ urls := make([]*extractors.Part, len(video.Fs))
for index, v := range video.Fs {
realURLData, err := request.Get(urlPrefix+v.L, refer, nil)
if err != nil {
@@ -213,13 +218,13 @@ func (e *extractor) Extract(url string, _ types.Options) ([]*types.Data, error)
if err != nil {
return nil, err
}
- urls[index] = &types.Part{
+ urls[index] = &extractors.Part{
URL: realURL.L,
Size: v.B,
Ext: ext,
}
}
- streams[strconv.Itoa(video.Bid)] = &types.Stream{
+ streams[strconv.Itoa(video.Bid)] = &extractors.Stream{
Parts: urls,
Size: video.Vsize,
Quality: video.Scrsz,
@@ -230,11 +235,11 @@ func (e *extractor) Extract(url string, _ types.Options) ([]*types.Data, error)
if e.siteType == SiteTypeIQ {
siteName = "爱奇艺 iq.com"
}
- return []*types.Data{
+ return []*extractors.Data{
{
Site: siteName,
Title: title,
- Type: types.DataTypeVideo,
+ Type: extractors.DataTypeVideo,
Streams: streams,
URL: url,
},
diff --git a/extractors/iqiyi/iqiyi_test.go b/extractors/iqiyi/iqiyi_test.go
index f234be757..d907b7357 100644
--- a/extractors/iqiyi/iqiyi_test.go
+++ b/extractors/iqiyi/iqiyi_test.go
@@ -3,7 +3,7 @@ package iqiyi
import (
"testing"
- "github.com/iawia002/lux/extractors/types"
+ "github.com/iawia002/lux/extractors"
"github.com/iawia002/lux/test"
)
@@ -42,7 +42,7 @@ func TestDownload(t *testing.T) {
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
- data, err := New(SiteTypeIqiyi).Extract(tt.args.URL, types.Options{})
+ data, err := New(SiteTypeIqiyi).Extract(tt.args.URL, extractors.Options{})
test.CheckError(t, err)
test.Check(t, tt.args, data[0])
})
diff --git a/extractors/mgtv/mgtv.go b/extractors/mgtv/mgtv.go
index 0e7d30462..f9c5a2965 100644
--- a/extractors/mgtv/mgtv.go
+++ b/extractors/mgtv/mgtv.go
@@ -9,11 +9,15 @@ import (
"strings"
"time"
- "github.com/iawia002/lux/extractors/types"
+ "github.com/iawia002/lux/extractors"
"github.com/iawia002/lux/request"
"github.com/iawia002/lux/utils"
)
+func init() {
+ extractors.Register("mgtv", New())
+}
+
type mgtvVideoStream struct {
Name string `json:"name"`
URL string `json:"url"`
@@ -97,12 +101,12 @@ func encodeTk2(str string) string {
type extractor struct{}
// New returns a mgtv extractor.
-func New() types.Extractor {
+func New() extractors.Extractor {
return &extractor{}
}
// Extract is the main function to extract the data.
-func (e *extractor) Extract(url string, option types.Options) ([]*types.Data, error) {
+func (e *extractor) Extract(url string, option extractors.Options) ([]*extractors.Data, error) {
html, err := request.Get(url, url, nil)
if err != nil {
return nil, err
@@ -116,7 +120,7 @@ func (e *extractor) Extract(url string, option types.Options) ([]*types.Data, er
vid = utils.MatchOneOf(html, `vid: (\d+),`)
}
if vid == nil || len(vid) < 2 {
- return nil, types.ErrURLParseFailed
+ return nil, extractors.ErrURLParseFailed
}
// API extract from https://js.mgtv.com/imgotv-miniv6/global/page/play-tv.js
@@ -166,7 +170,7 @@ func (e *extractor) Extract(url string, option types.Options) ([]*types.Data, er
)
mgtvStreams := mgtvData.Data.Stream
var addr mgtvVideoAddr
- streams := make(map[string]*types.Stream)
+ streams := make(map[string]*extractors.Stream)
for _, stream := range mgtvStreams {
if stream.URL == "" {
continue
@@ -185,26 +189,26 @@ func (e *extractor) Extract(url string, option types.Options) ([]*types.Data, er
if err != nil {
return nil, err
}
- urls := make([]*types.Part, len(m3u8URLs))
+ urls := make([]*extractors.Part, len(m3u8URLs))
for index, u := range m3u8URLs {
- urls[index] = &types.Part{
+ urls[index] = &extractors.Part{
URL: u.URL,
Size: u.Size,
Ext: "ts",
}
}
- streams[stream.Def] = &types.Stream{
+ streams[stream.Def] = &extractors.Stream{
Parts: urls,
Size: totalSize,
Quality: stream.Name,
}
}
- return []*types.Data{
+ return []*extractors.Data{
{
Site: "芒果TV mgtv.com",
Title: title,
- Type: types.DataTypeVideo,
+ Type: extractors.DataTypeVideo,
Streams: streams,
URL: url,
},
diff --git a/extractors/mgtv/mgtv_test.go b/extractors/mgtv/mgtv_test.go
index 743f2c437..6aff155d0 100644
--- a/extractors/mgtv/mgtv_test.go
+++ b/extractors/mgtv/mgtv_test.go
@@ -3,7 +3,7 @@ package mgtv
import (
"testing"
- "github.com/iawia002/lux/extractors/types"
+ "github.com/iawia002/lux/extractors"
"github.com/iawia002/lux/test"
)
@@ -42,7 +42,7 @@ func TestDownload(t *testing.T) {
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
- New().Extract(tt.args.URL, types.Options{})
+ New().Extract(tt.args.URL, extractors.Options{})
})
}
}
diff --git a/extractors/miaopai/miaopai.go b/extractors/miaopai/miaopai.go
index aa988435d..92f1aee8e 100644
--- a/extractors/miaopai/miaopai.go
+++ b/extractors/miaopai/miaopai.go
@@ -8,11 +8,15 @@ import (
"strings"
"time"
- "github.com/iawia002/lux/extractors/types"
+ "github.com/iawia002/lux/extractors"
"github.com/iawia002/lux/request"
"github.com/iawia002/lux/utils"
)
+func init() {
+ extractors.Register("miaopai", New())
+}
+
type miaopaiData struct {
Data struct {
Description string `json:"description"`
@@ -41,15 +45,15 @@ func getRandomString(l int) string {
type extractor struct{}
// New returns a miaopai extractor.
-func New() types.Extractor {
+func New() extractors.Extractor {
return &extractor{}
}
// Extract is the main function to extract the data.
-func (e *extractor) Extract(url string, option types.Options) ([]*types.Data, error) {
+func (e *extractor) Extract(url string, option extractors.Options) ([]*extractors.Data, error) {
ids := utils.MatchOneOf(url, `/media/([^\./]+)`, `/show(?:/channel)?/([^\./]+)`)
if ids == nil || len(ids) < 2 {
- return nil, types.ErrURLParseFailed
+ return nil, extractors.ErrURLParseFailed
}
id := ids[1]
@@ -79,23 +83,23 @@ func (e *extractor) Extract(url string, option types.Options) ([]*types.Data, er
if err != nil {
return nil, err
}
- urlData := &types.Part{
+ urlData := &extractors.Part{
URL: realURL,
Size: size,
Ext: "mp4",
}
- streams := map[string]*types.Stream{
+ streams := map[string]*extractors.Stream{
"default": {
- Parts: []*types.Part{urlData},
+ Parts: []*extractors.Part{urlData},
Size: size,
},
}
- return []*types.Data{
+ return []*extractors.Data{
{
Site: "秒拍 miaopai.com",
Title: data.Data.Description,
- Type: types.DataTypeVideo,
+ Type: extractors.DataTypeVideo,
Streams: streams,
URL: url,
},
diff --git a/extractors/miaopai/miaopai_test.go b/extractors/miaopai/miaopai_test.go
index 1918e1cdc..26e70e985 100644
--- a/extractors/miaopai/miaopai_test.go
+++ b/extractors/miaopai/miaopai_test.go
@@ -3,7 +3,7 @@ package miaopai
import (
"testing"
- "github.com/iawia002/lux/extractors/types"
+ "github.com/iawia002/lux/extractors"
"github.com/iawia002/lux/test"
)
@@ -23,7 +23,7 @@ func TestDownload(t *testing.T) {
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
- data, err := New().Extract(tt.args.URL, types.Options{})
+ data, err := New().Extract(tt.args.URL, extractors.Options{})
test.CheckError(t, err)
test.Check(t, tt.args, data[0])
})
diff --git a/extractors/netease/netease.go b/extractors/netease/netease.go
index 2cfd42391..1585fcb0a 100644
--- a/extractors/netease/netease.go
+++ b/extractors/netease/netease.go
@@ -5,20 +5,24 @@ import (
netURL "net/url"
"strings"
- "github.com/iawia002/lux/extractors/types"
+ "github.com/iawia002/lux/extractors"
"github.com/iawia002/lux/request"
"github.com/iawia002/lux/utils"
)
+func init() {
+ extractors.Register("163", New())
+}
+
type extractor struct{}
// New returns a netease extractor.
-func New() types.Extractor {
+func New() extractors.Extractor {
return &extractor{}
}
// Extract is the main function to extract the data.
-func (e *extractor) Extract(url string, option types.Options) ([]*types.Data, error) {
+func (e *extractor) Extract(url string, option extractors.Options) ([]*extractors.Data, error) {
url = strings.Replace(url, "/#/", "/", 1)
vid := utils.MatchOneOf(url, `/(mv|video)\?id=(\w+)`)
if vid == nil {
@@ -35,13 +39,13 @@ func (e *extractor) Extract(url string, option types.Options) ([]*types.Data, er
titles := utils.MatchOneOf(html, `<meta property="og:title" content="(.+?)" />`)
if titles == nil || len(titles) < 2 {
- return nil, types.ErrURLParseFailed
+ return nil, extractors.ErrURLParseFailed
}
title := titles[1]
realURLs := utils.MatchOneOf(html, `<meta property="og:video" content="(.+?)" />`)
if realURLs == nil || len(realURLs) < 2 {
- return nil, types.ErrURLParseFailed
+ return nil, extractors.ErrURLParseFailed
}
realURL, _ := netURL.QueryUnescape(realURLs[1])
@@ -49,22 +53,22 @@ func (e *extractor) Extract(url string, option types.Options) ([]*types.Data, er
if err != nil {
return nil, err
}
- urlData := &types.Part{
+ urlData := &extractors.Part{
URL: realURL,
Size: size,
Ext: "mp4",
}
- streams := map[string]*types.Stream{
+ streams := map[string]*extractors.Stream{
"default": {
- Parts: []*types.Part{urlData},
+ Parts: []*extractors.Part{urlData},
Size: size,
},
}
- return []*types.Data{
+ return []*extractors.Data{
{
Site: "网易云音乐 music.163.com",
Title: title,
- Type: types.DataTypeVideo,
+ Type: extractors.DataTypeVideo,
Streams: streams,
URL: url,
},
diff --git a/extractors/netease/netease_test.go b/extractors/netease/netease_test.go
index 442e8d2eb..c447ff5e2 100644
--- a/extractors/netease/netease_test.go
+++ b/extractors/netease/netease_test.go
@@ -3,7 +3,7 @@ package netease
import (
"testing"
- "github.com/iawia002/lux/extractors/types"
+ "github.com/iawia002/lux/extractors"
"github.com/iawia002/lux/test"
)
@@ -31,7 +31,7 @@ func TestDownload(t *testing.T) {
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
- data, err := New().Extract(tt.args.URL, types.Options{})
+ data, err := New().Extract(tt.args.URL, extractors.Options{})
test.CheckError(t, err)
test.Check(t, tt.args, data[0])
})
diff --git a/extractors/pixivision/pixivision.go b/extractors/pixivision/pixivision.go
index 451585758..cbf80c0d0 100644
--- a/extractors/pixivision/pixivision.go
+++ b/extractors/pixivision/pixivision.go
@@ -1,21 +1,25 @@
package pixivision
import (
- "github.com/iawia002/lux/extractors/types"
+ "github.com/iawia002/lux/extractors"
"github.com/iawia002/lux/parser"
"github.com/iawia002/lux/request"
"github.com/iawia002/lux/utils"
)
+func init() {
+ extractors.Register("pixivision", New())
+}
+
type extractor struct{}
// New returns a pixivision extractor.
-func New() types.Extractor {
+func New() extractors.Extractor {
return &extractor{}
}
// Extract is the main function to extract the data.
-func (e *extractor) Extract(url string, option types.Options) ([]*types.Data, error) {
+func (e *extractor) Extract(url string, option extractors.Options) ([]*extractors.Data, error) {
html, err := request.Get(url, url, nil)
if err != nil {
return nil, err
@@ -25,7 +29,7 @@ func (e *extractor) Extract(url string, option types.Options) ([]*types.Data, er
return nil, err
}
- parts := make([]*types.Part, 0, len(urls))
+ parts := make([]*extractors.Part, 0, len(urls))
for _, u := range urls {
_, ext, err := utils.GetNameAndExt(u)
if err != nil {
@@ -35,24 +39,24 @@ func (e *extractor) Extract(url string, option types.Options) ([]*types.Data, er
if err != nil {
return nil, err
}
- parts = append(parts, &types.Part{
+ parts = append(parts, &extractors.Part{
URL: u,
Size: size,
Ext: ext,
})
}
- streams := map[string]*types.Stream{
+ streams := map[string]*extractors.Stream{
"default": {
Parts: parts,
},
}
- return []*types.Data{
+ return []*extractors.Data{
{
Site: "pixivision pixivision.net",
Title: title,
- Type: types.DataTypeImage,
+ Type: extractors.DataTypeImage,
Streams: streams,
URL: url,
},
diff --git a/extractors/pixivision/pixivision_test.go b/extractors/pixivision/pixivision_test.go
index 2530d8cca..42be99339 100644
--- a/extractors/pixivision/pixivision_test.go
+++ b/extractors/pixivision/pixivision_test.go
@@ -3,7 +3,7 @@ package pixivision
import (
"testing"
- "github.com/iawia002/lux/extractors/types"
+ "github.com/iawia002/lux/extractors"
"github.com/iawia002/lux/test"
)
@@ -22,7 +22,7 @@ func TestDownload(t *testing.T) {
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
- data, err := New().Extract(tt.args.URL, types.Options{})
+ data, err := New().Extract(tt.args.URL, extractors.Options{})
test.CheckError(t, err)
test.Check(t, tt.args, data[0])
})
diff --git a/extractors/pornhub/pornhub.go b/extractors/pornhub/pornhub.go
index 635ab9525..4340b0b34 100644
--- a/extractors/pornhub/pornhub.go
+++ b/extractors/pornhub/pornhub.go
@@ -14,11 +14,15 @@ import (
"github.com/robertkrimen/otto"
- "github.com/iawia002/lux/extractors/types"
+ "github.com/iawia002/lux/extractors"
"github.com/iawia002/lux/request"
"github.com/iawia002/lux/utils"
)
+func init() {
+ extractors.Register("pornhub", New())
+}
+
type pornhubData struct {
DefaultQuality bool `json:"defaultQuality"`
Format string `json:"format"`
@@ -29,12 +33,12 @@ type pornhubData struct {
type extractor struct{}
// New returns a pornhub extractor.
-func New() types.Extractor {
+func New() extractors.Extractor {
return &extractor{}
}
// Extract is the main function to extract the data.
-func (e *extractor) Extract(url string, option types.Options) ([]*types.Data, error) {
+func (e *extractor) Extract(url string, option extractors.Options) ([]*extractors.Data, error) {
res, err := request.Request(http.MethodGet, url, nil, nil)
if err != nil {
return nil, err
@@ -77,7 +81,7 @@ func (e *extractor) Extract(url string, option types.Options) ([]*types.Data, er
reg, err := regexp.Compile(``,
)
if jsonStrings == nil || len(jsonStrings) < 2 {
- return nil, types.ErrURLParseFailed
+ return nil, extractors.ErrURLParseFailed
}
jsonString := jsonStrings[1]
var totalSize int64
- urls := make([]*types.Part, 0, 1)
+ urls := make([]*extractors.Part, 0, 1)
if strings.Contains(jsonString, `"image":{"@list"`) {
// there are two data structures in the same field(image)
var imageList tumblrImageList
@@ -77,28 +81,28 @@ func tumblrImageDownload(url, html, title string) ([]*types.Data, error) {
totalSize = size
urls = append(urls, urlData)
}
- streams := map[string]*types.Stream{
+ streams := map[string]*extractors.Stream{
"default": {
Parts: urls,
Size: totalSize,
},
}
- return []*types.Data{
+ return []*extractors.Data{
{
Site: "Tumblr tumblr.com",
Title: title,
- Type: types.DataTypeImage,
+ Type: extractors.DataTypeImage,
Streams: streams,
URL: url,
},
}, nil
}
-func tumblrVideoDownload(url, html, title string) ([]*types.Data, error) {
+func tumblrVideoDownload(url, html, title string) ([]*extractors.Data, error) {
videoURLs := utils.MatchOneOf(html, `