Compare commits

2 commits
main...go

SHA1        Message          Date
392664451d  wip              2023-12-24 22:10:01 +09:00
59c28ca0cd  cleanup project  2023-12-07 23:24:02 +09:00
26 changed files with 703 additions and 2909 deletions

1
.gitignore vendored
View file

@@ -1,3 +1,2 @@
node_modules
*.db
dist

View file

@@ -3,15 +3,16 @@
## Usage
```
$ npx https://git.fogtype.com/nebel/gadl/archive/main.tar.gz --help
$ go install git.fogtype.com/nebel/gadl@latest
$ gadl help
```
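A typical session with the subcommands defined in `main.go` later in this diff might look like the sketch below; the command names and flags are taken from the diff, while the ordering and comments are illustrative:
```
$ gadl install                      # download the Chromium build driven by playwright-go
$ gadl login --platform dmm-books   # open a visible browser for the platform (login flow is still WIP here)
$ gadl add <reader_url>             # register a book by its reader URL
$ gadl list                         # print the library as JSON lines
```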
## Supported Sites
- DLsite Doujin
- DMM Books (manga)
- FANZA Doujin
- Google Play Books (manga)
- [ ] Google Play Books (manga)
- [ ] DMM Books (manga)
- [ ] FANZA Doujin
- [ ] DLsite Doujin
## License

View file

@@ -1,4 +0,0 @@
#!/usr/bin/env node
import "tsx";
await import("../main.ts");

238
browser.go Normal file
View file

@@ -0,0 +1,238 @@
package main
import (
"encoding/base64"
"encoding/json"
"errors"
"log"
"net/http"
"net/url"
"strings"
"github.com/playwright-community/playwright-go"
)
type rawImageFile struct {
url string
blocks []map[string]int
width *int
height *int
}
type file struct {
contentType string
data []byte
}
type browser struct {
db *database
pw *playwright.Playwright
browser *playwright.Browser
}
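// installBrowser downloads the Playwright driver and the Chromium build that the rest of the code drives.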
func installBrowser() error {
opts := playwright.RunOptions{
Browsers: []string{"chromium"},
Verbose: true,
}
return playwright.Install(&opts)
}
type runOptions struct {
db *database
headless bool
}
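// runBrowser starts the Playwright driver and launches Chromium; any failure here is fatal because nothing else can run without a browser.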
func runBrowser(options runOptions) *browser {
pw, err := playwright.Run()
if err != nil {
log.Fatal(err)
}
launchOptions := playwright.BrowserTypeLaunchOptions{
Headless: playwright.Bool(options.headless),
}
b, err := pw.Chromium.Launch(launchOptions)
if err != nil {
log.Fatal(err)
}
browser := browser{
db: options.db,
pw: pw,
browser: &b,
}
return &browser
}
func (b browser) stop() error {
if b.browser != nil {
(*b.browser).Close()
}
return b.pw.Stop()
}
func (b browser) newContext() (playwright.BrowserContext, error) {
browser := *b.browser
return browser.NewContext()
}
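// loadBrowserContext restores a platform's saved session (Playwright storage state, i.e. cookies and local storage) from the platforms.secrets column and opens a browser context with it.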
func (b browser) loadBrowserContext(platform string) (playwright.BrowserContext, error) {
var secrets string
row := b.db.QueryRow("select secrets from platforms where name = $1", platform)
if err := row.Scan(&secrets); err != nil {
return nil, err
}
var storageState playwright.OptionalStorageState
if err := json.Unmarshal([]byte(secrets), &storageState); err != nil {
return nil, err
}
browserNewContextOptions := playwright.BrowserNewContextOptions{
StorageState: &storageState,
}
ctx, err := (*b.browser).NewContext(browserNewContextOptions)
if err != nil {
return nil, err
}
return ctx, nil
}
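// saveBrowserContext serializes the context's current storage state and writes it back to platforms.secrets.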
func (b browser) saveBrowserContext(platform string, ctx playwright.BrowserContext) error {
storageState, err := ctx.StorageState()
if err != nil {
return err
}
secrets, err := json.Marshal(storageState)
if err != nil {
return err
}
if _, err := b.db.Exec("update platforms set secrets = $1 where name = $2", secrets, platform); err != nil {
return err
}
return nil
}
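// parseDataURL decodes a data: URL (base64 or percent-encoded) into raw bytes and sniffs its MIME type from the payload.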
func parseDataURL(dataURL string) (*file, error) {
if !strings.HasPrefix(dataURL, "data:") {
return nil, errors.New("not data scheme")
}
parts := strings.SplitN(dataURL, ",", 2)
if len(parts) != 2 {
return nil, errors.New("invalid data URL")
}
raw, err := url.PathUnescape(parts[1])
if err != nil {
return nil, err
}
var data []byte
if strings.HasSuffix(parts[0], ";base64") {
data, err = base64.StdEncoding.DecodeString(raw)
if err != nil {
return nil, err
}
} else {
data = []byte(raw)
}
file := file{
contentType: http.DetectContentType(data),
data: data,
}
return &file, nil
}
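// draw is evaluated inside the page: it redraws an image block by block onto a canvas according to the srcX/srcY/destX/destY map in imageFile.blocks and returns the result as a data URL.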
var draw = `
async function drawImage(imageFile) {
const canvas = Object.assign(document.createElement("canvas"), {
width: imageFile.width,
height: imageFile.height,
});
const image = (await new Promise((resolve) => {
Object.assign(new Image(), {
crossOrigin: "use-credentials",
src: imageFile.url,
onload() {
resolve(this);
},
});
}));
const ctx = canvas.getContext("2d");
for (const q of imageFile.blocks) {
ctx.drawImage(image, q.destX, q.destY, q.width, q.height, q.srcX, q.srcY, q.width, q.height);
}
const dataUrl = canvas.toDataURL();
return dataUrl;
}
`
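// fetch is evaluated inside the page: it downloads imageFile.url (needed for blob: URLs, which only resolve in the page) and converts the response to a data URL via FileReader.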
var fetch = `
async function fetchImage(imageFile) {
const res = await fetch(imageFile.url);
const blob = await res.blob();
const dataUrl = await new Promise((resolve, reject) => {
const fileReader = Object.assign(new FileReader(), {
onload() {
resolve(this.result);
},
onerror(e) {
const error = new Error(` + "`${e.type}: ${e.message}`" + `);
reject(error);
},
});
fileReader.readAsDataURL(blob);
});
return dataUrl;
}
`
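// drawImage picks one of three strategies: canvas reassembly when block data is present, an in-page fetch for blob: URLs, and a plain request through the browser context otherwise.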
func (b browser) drawImage(page playwright.Page, imageFile rawImageFile) (*file, error) {
if len(imageFile.blocks) > 0 {
dataURL, err := page.Evaluate(draw, imageFile)
if err != nil {
return nil, err
}
return parseDataURL(dataURL.(string))
}
if strings.HasPrefix(imageFile.url, "blob:") {
dataURL, err := page.Evaluate(fetch, imageFile)
if err != nil {
return nil, err
}
return parseDataURL(dataURL.(string))
}
res, err := page.Context().Request().Get(imageFile.url)
if err != nil {
return nil, err
}
data, err := res.Body()
if err != nil {
return nil, err
}
file := file{
contentType: http.DetectContentType(data),
data: data,
}
return &file, nil
}

View file

@@ -1,166 +0,0 @@
import * as Playwright from "playwright";
import { chromium, devices } from "playwright";
import type { Database } from "./database";
import type { TPlatform } from "./platform";
export type ImageFile = {
url: string;
blocks?: Array<Record<string, number>>;
width?: number;
height?: number;
};
async function drawImage(imageFile: ImageFile): Promise<string> {
const canvas = Object.assign(document.createElement("canvas"), {
width: imageFile.width,
height: imageFile.height,
});
const image = (await new Promise((resolve) => {
Object.assign(new Image(), {
crossOrigin: "use-credentials",
src: imageFile.url,
onload() {
resolve(this);
},
});
})) as HTMLImageElement;
const ctx = canvas.getContext("2d")!;
for (const q of imageFile.blocks!) {
ctx.drawImage(
image,
q.destX,
q.destY,
q.width,
q.height,
q.srcX,
q.srcY,
q.width,
q.height,
);
}
const dataUrl = canvas.toDataURL();
return dataUrl;
}
async function fetchImage(imageFile: ImageFile): Promise<string> {
const res = await fetch(imageFile.url);
const blob = await res.blob();
const dataUrl: string = await new Promise((resolve, reject) => {
const fileReader = Object.assign(new FileReader(), {
onload(): void {
resolve(this.result);
},
onerror(e: ErrorEvent): void {
const error = new Error(`${e.type}: ${e.message}`);
reject(error);
},
});
fileReader.readAsDataURL(blob);
});
return dataUrl;
}
async function dataUrlToBlob(dataUrl: string): Promise<Blob> {
const res = await fetch(dataUrl);
return await res.blob();
}
export type Browser = {
loadBrowserContext(platform: TPlatform): Promise<Playwright.BrowserContext>;
saveBrowserContext(platform: TPlatform, ctx: BrowserContext): Promise<void>;
newContext(): Promise<Playwright.BrowserContext>;
close(): Promise<void>;
drawImage(
pageOrFrame: Playwright.Page | Playwright.Frame,
imageFile: ImageFile,
): Promise<Blob>;
};
export type BrowserContext = Playwright.BrowserContext;
export async function createBrowser({
db,
headless = true,
}: {
db: Database;
headless?: boolean;
}): Promise<Browser> {
const { userAgent } = devices["Desktop Chrome"];
const browser = await chromium.launch({
headless,
args: ["--disable-blink-features=AutomationControlled"],
});
return {
async loadBrowserContext(
platform: TPlatform,
): Promise<Playwright.BrowserContext> {
const { secrets } = await db.get(
`select secrets from platforms where name = ?`,
platform,
);
const storageState = JSON.parse(secrets) ?? undefined;
const ctx = await browser.newContext({ storageState, userAgent });
return ctx;
},
async saveBrowserContext(
platform: TPlatform,
ctx: BrowserContext,
): Promise<void> {
const secrets = await ctx.storageState();
await db.run(
`update platforms set secrets = ? where name = ?`,
JSON.stringify(secrets),
platform,
);
},
newContext: () => browser.newContext(),
close: () => browser.close(),
async drawImage(
pageOrFrame: Playwright.Page | Playwright.Frame,
imageFile: ImageFile,
): Promise<Blob> {
if (Array.isArray(imageFile.blocks) && imageFile.blocks.length > 0) {
const dataUrl = await pageOrFrame.evaluate(drawImage, imageFile);
return await dataUrlToBlob(dataUrl);
}
if (imageFile.url.startsWith("blob:")) {
const dataUrl = await pageOrFrame.evaluate(fetchImage, imageFile);
return await dataUrlToBlob(dataUrl);
}
const page = "page" in pageOrFrame ? pageOrFrame.page() : pageOrFrame;
const res = await page.context().request.get(imageFile.url);
const headers = res.headers();
const buffer = await res.body();
let type = headers["content-type"];
if (type === "binary/octet-stream") {
const [, extension] =
/^attachment *; *filename="[^"]+[.]([^.]+)" *(?:$|;)/i.exec(
headers["content-disposition"],
) ?? [];
switch (extension) {
case "zip":
type = "application/zip";
break;
}
}
return new Blob([buffer], { type });
},
};
}

49
database.go Normal file
View file

@@ -0,0 +1,49 @@
package main
import (
"database/sql"
"embed"
"log"
"net/url"
"os"
"path/filepath"
"github.com/amacneil/dbmate/v2/pkg/dbmate"
_ "github.com/amacneil/dbmate/v2/pkg/driver/sqlite"
_ "github.com/mattn/go-sqlite3"
)
//go:embed migrations/*.sql
var fs embed.FS
type database struct {
*sql.DB
}
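// newDatabase runs the embedded dbmate migrations against the SQLite file at path, restricts its permissions to 0600, and returns the opened handle.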
func newDatabase(path string) *database {
url, err := url.Parse("sqlite3://")
if err != nil {
log.Fatal(err)
}
url.Path, err = filepath.Abs(path)
if err != nil {
log.Fatal(err)
}
dbmate := dbmate.New(url)
dbmate.AutoDumpSchema = false
dbmate.FS = fs
dbmate.MigrationsDir = []string{"migrations"}
if err := dbmate.CreateAndMigrate(); err != nil {
log.Fatal(err)
}
if err := os.Chmod(url.Path, 0600); err != nil {
log.Fatal(err)
}
db, err := sql.Open("sqlite3", url.Path)
if err != nil {
log.Fatal(err)
}
return &database{db}
}

View file

@@ -1,21 +0,0 @@
import fs from "node:fs/promises";
import path from "node:path";
import sqlite3 from "sqlite3";
import { Database, open } from "sqlite";
export { Database };
export async function createDatabase(file: string): Promise<Database> {
await fs.mkdir(path.dirname(file), { recursive: true });
const db = await open({
filename: file,
driver: sqlite3.cached.Database,
});
const migrationsPath = new URL("./migrations", import.meta.url).pathname;
await fs.chmod(file, 0o600);
await db.migrate({ migrationsPath });
return db;
}

21
go.mod Normal file
View file

@@ -0,0 +1,21 @@
module git.fogtype.com/nebel/gadl
go 1.21.5
require (
github.com/amacneil/dbmate/v2 v2.9.0
github.com/mattn/go-sqlite3 v1.14.19
github.com/playwright-community/playwright-go v0.3900.1
github.com/urfave/cli/v2 v2.26.0
)
require (
github.com/cpuguy83/go-md2man/v2 v2.0.3 // indirect
github.com/danwakefield/fnmatch v0.0.0-20160403171240-cbb64ac3d964 // indirect
github.com/go-jose/go-jose/v3 v3.0.1 // indirect
github.com/go-stack/stack v1.8.1 // indirect
github.com/lib/pq v1.10.9 // indirect
github.com/russross/blackfriday/v2 v2.1.0 // indirect
github.com/xrash/smetrics v0.0.0-20231213231151-1d8dd44e695e // indirect
go.uber.org/multierr v1.11.0 // indirect
)

49
go.sum Normal file
View file

@@ -0,0 +1,49 @@
github.com/amacneil/dbmate/v2 v2.9.0 h1:uXBlYKEQJL2gwXdiSlJLcXpgpibeak3OY0NN0oM/TCM=
github.com/amacneil/dbmate/v2 v2.9.0/go.mod h1:ygYXKjaOsIUnIZa/jfyosyfN2BXwwRY8uDGGTvQCADQ=
github.com/cpuguy83/go-md2man/v2 v2.0.3 h1:qMCsGGgs+MAzDFyp9LpAe1Lqy/fY/qCovCm0qnXZOBM=
github.com/cpuguy83/go-md2man/v2 v2.0.3/go.mod h1:tgQtvFlXSQOSOSIRvRPT7W67SCa46tRHOmNcaadrF8o=
github.com/danwakefield/fnmatch v0.0.0-20160403171240-cbb64ac3d964 h1:y5HC9v93H5EPKqaS1UYVg1uYah5Xf51mBfIoWehClUQ=
github.com/danwakefield/fnmatch v0.0.0-20160403171240-cbb64ac3d964/go.mod h1:Xd9hchkHSWYkEqJwUGisez3G1QY8Ryz0sdWrLPMGjLk=
github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c=
github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
github.com/go-jose/go-jose/v3 v3.0.1 h1:pWmKFVtt+Jl0vBZTIpz/eAKwsm6LkIxDVVbFHKkchhA=
github.com/go-jose/go-jose/v3 v3.0.1/go.mod h1:RNkWWRld676jZEYoV3+XK8L2ZnNSvIsxFMht0mSX+u8=
github.com/go-sql-driver/mysql v1.7.1 h1:lUIinVbN1DY0xBg0eMOzmmtGoHwWBbvnWubQUrtU8EI=
github.com/go-sql-driver/mysql v1.7.1/go.mod h1:OXbVy3sEdcQ2Doequ6Z5BW6fXNQTmx+9S1MCJN5yJMI=
github.com/go-stack/stack v1.8.1 h1:ntEHSVwIt7PNXNpgPmVfMrNhLtgjlmnZha2kOpuRiDw=
github.com/go-stack/stack v1.8.1/go.mod h1:dcoOX6HbPZSZptuspn9bctJ+N/CnF5gGygcUP3XYfe4=
github.com/google/go-cmp v0.5.0/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE=
github.com/lib/pq v1.10.9 h1:YXG7RB+JIjhP29X+OtkiDnYaXQwpS4JEWq7dtCCRUEw=
github.com/lib/pq v1.10.9/go.mod h1:AlVN5x4E4T544tWzH6hKfbfQvm3HdbOxrmggDNAPY9o=
github.com/mattn/go-sqlite3 v1.14.19 h1:fhGleo2h1p8tVChob4I9HpmVFIAkKGpiukdrgQbWfGI=
github.com/mattn/go-sqlite3 v1.14.19/go.mod h1:2eHXhiwb8IkHr+BDWZGa96P6+rkvnG63S2DGjv9HUNg=
github.com/playwright-community/playwright-go v0.3900.1 h1:8BkmDxVzLTp3USQ50EyXJSXcz0XDMwNP5y29lHIZ9Fc=
github.com/playwright-community/playwright-go v0.3900.1/go.mod h1:mbNzMqt04IVRdhVfXWqmCxd81gCdL3BA5hj6/pVAIqM=
github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM=
github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4=
github.com/russross/blackfriday/v2 v2.1.0 h1:JIOH55/0cWyOuilr9/qlrm0BSXldqnqwMsf35Ld67mk=
github.com/russross/blackfriday/v2 v2.1.0/go.mod h1:+Rmxgy9KzJVeS9/2gXHxylqXiyQDYRxCVz55jmeOWTM=
github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME=
github.com/stretchr/testify v1.6.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg=
github.com/stretchr/testify v1.8.4 h1:CcVxjf3Q8PM0mHUKJCdn+eZZtm5yQwehR5yeSVQQcUk=
github.com/stretchr/testify v1.8.4/go.mod h1:sz/lmYIOXD/1dqDmKjjqLyZ2RngseejIcXlSw2iwfAo=
github.com/urfave/cli/v2 v2.26.0 h1:3f3AMg3HpThFNT4I++TKOejZO8yU55t3JnnSr4S4QEI=
github.com/urfave/cli/v2 v2.26.0/go.mod h1:8qnjx1vcq5s2/wpsqoZFndg2CE5tNFyrTvS6SinrnYQ=
github.com/xrash/smetrics v0.0.0-20231213231151-1d8dd44e695e h1:+SOyEddqYF09QP7vr7CgJ1eti3pY9Fn3LHO1M1r/0sI=
github.com/xrash/smetrics v0.0.0-20231213231151-1d8dd44e695e/go.mod h1:N3UwUGtsrSj3ccvlPHLoLsHnpR27oXr4ZE984MbSER8=
github.com/zenizh/go-capturer v0.0.0-20211219060012-52ea6c8fed04 h1:qXafrlZL1WsJW5OokjraLLRURHiw0OzKHD/RNdspp4w=
github.com/zenizh/go-capturer v0.0.0-20211219060012-52ea6c8fed04/go.mod h1:FiwNQxz6hGoNFBC4nIx+CxZhI3nne5RmIOlT/MXcSD4=
go.uber.org/multierr v1.11.0 h1:blXXJkSxSSfBVBlC76pxqeO+LN3aDfLQo+309xJstO0=
go.uber.org/multierr v1.11.0/go.mod h1:20+QtiLqy0Nd6FdQB9TLXag12DsQkrbs3htMFfDN80Y=
golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w=
golang.org/x/crypto v0.0.0-20190911031432-227b76d455e7/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI=
golang.org/x/net v0.0.0-20190404232315-eb5bcb51f2a3/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg=
golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
golang.org/x/sys v0.0.0-20190412213103-97732733099d/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA=
gopkg.in/yaml.v3 v3.0.1/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=

140
library.go Normal file
View file

@@ -0,0 +1,140 @@
package main
import (
"encoding/json"
)
type book struct {
ID int `json:"id"`
Platform string `json:"platform"`
ReaderURL string `json:"readerUrl"`
Title string `json:"title"`
Authors []string `json:"authors"`
}
type library struct {
db *database
}
type newLibraryOptions struct {
db *database
}
func newLibrary(options newLibraryOptions) *library {
return &library{options.db}
}
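// add upserts a book keyed by reader_url; on conflict the title and authors are refreshed.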
func (lib *library) add(b book) error {
authors, err := json.Marshal(b.Authors)
if err != nil {
return err
}
_, err = lib.db.Exec(`
insert into books(
platform_id,
reader_url,
title,
authors)
values((select id from platforms where name = $1), $2, $3, $4)
on conflict(reader_url)
do update set title = excluded.title, authors = excluded.authors
`,
b.Platform,
b.ReaderURL,
b.Title,
authors,
)
return err
}
func (lib *library) delete(id string) error {
_, err := lib.db.Exec(`delete from books where id = $1`, id)
return err
}
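// get looks up a single book by reader URL or numeric id (the query matches either column).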
func (lib *library) get(readerURLOrBookID string) (*book, error) {
row := lib.db.QueryRow(`
select
books.id,
platforms.name,
books.reader_url,
books.title,
books.authors
from books
left join platforms
on books.platform_id = platforms.id
where books.reader_url = $1 or books.id = $1
`,
readerURLOrBookID,
)
var (
b book
authors []byte
)
err := row.Scan(
&b.ID,
&b.Platform,
&b.ReaderURL,
&b.Title,
&authors,
)
if err != nil {
return nil, err
}
err = json.Unmarshal(authors, &b.Authors)
return &b, err
}
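// getBooks returns every book joined with its platform name; authors are stored as a JSON array and decoded per row.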
func (lib *library) getBooks() (*[]book, error) {
rows, err := lib.db.Query(`
select
books.id,
platforms.name,
books.reader_url,
books.title,
books.authors
from books
left join platforms
on books.platform_id = platforms.id
`)
if err != nil {
return nil, err
}
defer rows.Close()
bs := &[]book{}
for rows.Next() {
var (
b book
authors []byte
)
if err := rows.Scan(
&b.ID,
&b.Platform,
&b.ReaderURL,
&b.Title,
&authors,
); err != nil {
return nil, err
}
if err = json.Unmarshal(authors, &b.Authors); err != nil {
return nil, err
}
*bs = append(*bs, b)
}
return bs, rows.Err()
}

View file

@@ -1,156 +0,0 @@
import fs from "node:fs/promises";
import { createWriteStream } from "node:fs";
import stream from "node:stream/promises";
import { Zip, ZipPassThrough } from "fflate";
import { Database } from "./database";
import { type TPlatform, site } from "./platform";
export type Book = {
id: number;
platform: TPlatform;
readerUrl: string;
title: string;
authors: Array<string>;
};
export function createLibrary(db: Database) {
return {
async add(readerUrlOrBook: string | Book) {
if (typeof readerUrlOrBook === "string") {
const platform = site(readerUrlOrBook);
await db.run(
`\
insert into books(
platform_id,
reader_url)
values((select id from platforms where name = ?), ?)
on conflict(reader_url)
do nothing
`,
platform,
readerUrlOrBook,
);
return;
}
await db.run(
`\
insert into books(
platform_id,
reader_url,
title,
authors)
values((select id from platforms where name = ?), ?, ?, ?)
on conflict(reader_url)
do update set title = excluded.title, authors = excluded.authors
`,
readerUrlOrBook.platform,
readerUrlOrBook.readerUrl,
readerUrlOrBook.title,
JSON.stringify(readerUrlOrBook.authors),
);
},
async delete(id: number) {
await db.run(`delete from books where id = ?`, id);
},
async get(readerUrlOrBookId: string | number): Promise<Book | undefined> {
const row = await db.get(
`select books.id, platforms.name as platform, books.reader_url as readerUrl, books.title, books.authors from books left join platforms on books.platform_id = platforms.id where books.reader_url = ? or books.id = ?`,
readerUrlOrBookId,
Number(readerUrlOrBookId),
);
const book: Book | undefined = row && {
...row,
authors: JSON.parse(row.authors),
};
return book;
},
async getBooks(): Promise<Array<Book>> {
const rows = await db.all(
`select books.id, platforms.name as platform, books.reader_url as readerUrl, books.title, books.authors from books left join platforms on books.platform_id = platforms.id`,
);
const books: Array<Book> = rows.map((row) => ({
...row,
authors: JSON.parse(row.authors),
}));
return books;
},
async archive(
path: string,
book: Book,
opts: {
outDir: string;
outAuthorsLimit: number;
},
) {
const bookDir = await fs.stat(path);
if (!bookDir.isDirectory()) {
throw new Error(`Not found: ${path}`);
}
if (!book.title) {
book.title = String(book.id);
}
if (book.authors.length > 0) {
book.title = `${book.title}`;
}
const title = `${book.authors.slice(0, opts.outAuthorsLimit).join("、")}${
book.title
}`.replace(/[/]/g, "%2F");
const files = await fs.readdir(path);
if (files.every((f) => f.match(/[.](zip|cbz)$/))) {
const digits = String(files.length).length;
function pad(n: string) {
return n.padStart(digits, "0");
}
for (const [n, f] of Object.entries(files)) {
await fs.rename(
`${path}/${f}`,
`${opts.outDir}/${title}${
files.length > 1 ? ` - ${pad(n)}` : ""
}.${f.split(".").at(-1)}`,
);
}
await fs.rmdir(path);
return;
}
const out = createWriteStream(`${opts.outDir}/${title}.cbz`);
const zip = new Zip(function cb(err, data, final) {
if (err) {
out.destroy(err);
return;
}
out[final ? "end" : "write"](data);
});
for (const file of files) {
const data = new ZipPassThrough(file);
zip.add(data);
const buffer = await fs.readFile(`${path}/${file}`);
data.push(buffer, true);
}
zip.end();
await stream.finished(out);
await fs.rm(path, { recursive: true });
},
};
}

169
main.go Normal file
View file

@@ -0,0 +1,169 @@
package main
import (
"encoding/json"
"fmt"
"log"
"os"
"github.com/urfave/cli/v2"
)
var version = "v1.0.0"
/*
TODO
- platform
- login
- logout
- pull
- download
*/
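// main wires up the urfave/cli commands; apart from install, each action opens the database named by --db, and login/add additionally read --platform.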
func main() {
flags := []cli.Flag{
&cli.StringFlag{
Name: "db",
Usage: "database file path",
Value: "gadl.db",
},
&cli.StringFlag{
Name: "platform",
Usage: "platform type",
Value: "google-play-books",
},
}
app := &cli.App{
Name: "gadl",
Usage: "Manga Downloader",
Version: version,
Flags: flags,
Commands: cli.Commands{
&cli.Command{
Name: "install",
Usage: "install system dependencies for browser",
Flags: flags,
Action: func(c *cli.Context) error {
return installBrowser()
},
},
&cli.Command{
Name: "login",
Flags: flags,
Action: func(c *cli.Context) error {
db := newDatabase(c.String("db"))
browser := runBrowser(runOptions{db: db, headless: false})
platform := c.String("platform")
_, err := browser.loadBrowserContext(platform)
if err != nil {
log.Fatal(err)
}
return browser.stop()
},
},
&cli.Command{
Name: "add",
Flags: flags,
Action: func(c *cli.Context) error {
db := newDatabase(c.String("db"))
lib := newLibrary(newLibraryOptions{db})
for _, url := range c.Args().Slice() {
b := book{
// TODO: url to platform
Platform: c.String("platform"),
ReaderURL: url,
}
if err := lib.add(b); err != nil {
log.Fatal(err)
}
}
return nil
},
},
&cli.Command{
Name: "delete",
Flags: flags,
Action: func(c *cli.Context) error {
db := newDatabase(c.String("db"))
lib := newLibrary(newLibraryOptions{db})
for _, id := range c.Args().Slice() {
if err := lib.delete(id); err != nil {
log.Fatal(err)
}
}
return nil
},
},
&cli.Command{
Name: "list",
Flags: flags,
Action: func(c *cli.Context) error {
db := newDatabase(c.String("db"))
lib := newLibrary(newLibraryOptions{db})
bs, err := lib.getBooks()
if err != nil {
log.Fatal(err)
}
for _, b := range *bs {
json, err := json.Marshal(&b)
if err != nil {
log.Fatal(err)
}
fmt.Println(string(json))
}
return nil
},
},
&cli.Command{
Name: "view",
Flags: flags,
Action: func(c *cli.Context) error {
db := newDatabase(c.String("db"))
lib := newLibrary(newLibraryOptions{db})
for _, id := range c.Args().Slice() {
b, err := lib.get(id)
if err != nil {
log.Fatal(err)
}
json, err := json.Marshal(&b)
if err != nil {
log.Fatal(err)
}
fmt.Println(string(json))
}
return nil
},
},
&cli.Command{
Name: "download",
Flags: flags,
Action: func(c *cli.Context) error {
newDatabase(c.String("db"))
return nil
},
},
},
}
if err := app.Run(os.Args); err != nil {
log.Fatal(err)
}
}

246
main.ts
View file

@@ -1,246 +0,0 @@
import fs from "node:fs/promises";
import os from "node:os";
import path from "node:path";
import util from "node:util";
import { createBrowser } from "./browser";
import { createDatabase } from "./database";
import { type Book, createLibrary } from "./library";
import { type TPlatform, createPlatform, platforms } from "./platform";
import * as pkg from "./package.json";
const options = {
db: {
type: "string",
default: "gadl.db",
toString() {
return `<database_file_path> (default: ${this.default})`;
},
},
"out-dir": {
type: "string",
default: "dist",
toString() {
return `<output_directory_path> (default: ${this.default})`;
},
},
"out-authors-limit": {
type: "string",
default: "3",
toString() {
return `<output_authors_limit> (default: ${this.default})`;
},
},
login: {
type: "string",
toString() {
return [...Object.keys(platforms)].join("|");
},
async run() {
const db = await createDatabase(args.values.db!);
const browser = await createBrowser({ db, headless: false });
const platform = createPlatform({
platform: args.values.login as TPlatform,
db,
browser,
});
await platform.login();
await browser.close();
},
},
logout: {
type: "string",
toString() {
return [...Object.keys(platforms)].join("|");
},
async run() {
const db = await createDatabase(args.values.db!);
const browser = await createBrowser({ db });
const platform = createPlatform({
platform: args.values.logout as TPlatform,
db,
browser,
});
await platform.logout();
await browser.close();
},
},
add: {
type: "string",
toString() {
return `<reader_url_or_id>`;
},
async run() {
const db = await createDatabase(args.values.db!);
const library = createLibrary(db);
await library.add(args.values.add!);
},
},
delete: {
type: "string",
toString() {
return `<id>`;
},
async run() {
const db = await createDatabase(args.values.db!);
const library = createLibrary(db);
await library.delete(Number(args.values.delete));
},
},
list: {
type: "boolean",
short: "l",
async run() {
const db = await createDatabase(args.values.db!);
const library = createLibrary(db);
const books = await library.getBooks();
console.dir(books, {
depth: null,
maxArrayLength: null,
maxStringLength: null,
});
},
},
view: {
type: "string",
toString() {
return `<reader_url_or_id>`;
},
async run() {
const db = await createDatabase(args.values.db!);
const library = createLibrary(db);
const book = await library.get(args.values.view!);
if (!book) {
process.exit(1);
}
console.dir(book, {
depth: null,
maxArrayLength: null,
maxStringLength: null,
});
},
},
pull: {
type: "string",
toString() {
return [...Object.keys(platforms)].join("|");
},
async run() {
const db = await createDatabase(args.values.db!);
const library = createLibrary(db);
const browser = await createBrowser({ db });
const platform = createPlatform({
platform: args.values.pull as TPlatform,
db,
browser,
});
for await (const book of platform.pull()) {
await library.add(book);
}
await browser.close();
},
},
download: {
type: "string",
toString() {
return `all|<reader_url_or_id>`;
},
async run() {
const db = await createDatabase(args.values.db!);
const library = createLibrary(db);
const books: Array<Book> = [];
if (args.values.download === "all") {
books.push(...(await library.getBooks()));
} else {
if (URL.canParse(args.values.download!)) {
await library.add(args.values.download!);
}
const book = await library.get(args.values.download!);
if (!book) {
process.exit(1);
}
books.push(book);
}
for (const book of books) {
const browser = await createBrowser({ db });
const platform = createPlatform({
platform: book.platform,
db,
browser,
});
const dir = await fs.mkdtemp(
path.join(os.tmpdir(), `gadl-${book.id}-`),
);
await platform.download(dir, book);
await library.archive(dir, book, {
outDir: args.values["out-dir"]!,
outAuthorsLimit: Number(args.values["out-authors-limit"]!),
});
await browser.close();
}
},
},
json: {
type: "boolean",
},
version: {
type: "boolean",
short: "v",
run() {
console.log(pkg.version);
},
},
help: {
type: "boolean",
short: "h",
run() {
console.log(
[
"Usage: gadl [options...]",
` $ npx playwright@${pkg.dependencies.playwright} install --with-deps chromium`,
` $ gadl --login=<platform_type>`,
` $ gadl --download=<reader_url>`,
"",
"Available options:",
...Object.entries(options).map((option) =>
[
` --${option[0]}`,
"short" in option[1] && ` -${option[1].short}`,
option[1].type === "string" && `=${option[1]}`,
]
.filter(Boolean)
.join(""),
),
].join("\n"),
);
},
},
} as const;
const args = util.parseArgs({ options });
if (args.values.json) {
console.dir = function dir(arrayOrObject) {
for (const obj of [arrayOrObject].flat()) {
console.log(JSON.stringify(obj));
}
};
}
for (const option of Object.keys(options)) {
if (args.values[option] && typeof options[option].run === "function") {
await options[option].run();
process.exit();
}
}
options.help.run();

12
migrations/0_init.sql Normal file
View file

@@ -0,0 +1,12 @@
-- migrate:up
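-- Presumably a bridge from the old Node migration bookkeeping: copy any versions recorded in the
-- legacy "migrations" table into dbmate's schema_migrations, then drop the legacy table.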
create table if not exists migrations (
id integer primary key
);
insert into schema_migrations(version)
select id
from migrations;
drop table if exists migrations;
-- migrate:down

View file

@@ -1,3 +1,4 @@
-- migrate:up
create table platforms (
id integer primary key autoincrement,
name text unique not null,
@@ -14,3 +15,5 @@ create table books (
insert into platforms(name) values
('dmm-books'),
('google-play-books');
-- migrate:down

View file

@@ -1,2 +1,5 @@
-- migrate:up
alter table books add column title text not null default '';
alter table books add column authors json not null default '[]';
-- migrate:down

View file

@@ -1,2 +1,5 @@
-- migrate:up
insert into platforms(name) values
('fanza-doujin');
-- migrate:down

View file

@@ -1,2 +1,5 @@
-- migrate:up
insert into platforms(name) values
('dlsite-maniax');
-- migrate:down

1600
package-lock.json generated

File diff suppressed because it is too large.

View file

@@ -1,17 +0,0 @@
{
"name": "@fogtype/gadl",
"version": "1.4.0",
"license": "AGPL-3.0",
"type": "module",
"bin": "bin/run.js",
"dependencies": {
"fflate": "^0.8.1",
"playwright": "1.40.1",
"sqlite": "^5.1.1",
"sqlite3": "^5.1.6",
"tsx": "^4.6.1"
},
"devDependencies": {
"@types/node": "^20.10.2"
}
}

7
platform.go Normal file
View file

@@ -0,0 +1,7 @@
package main
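// platform sketches the interface the per-site downloaders (see the removed platforms/*.ts modules) are expected to implement on the Go side.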
type platform interface {
name() string
pull() []book
getFiles(b book) []file
}

View file

@@ -1,111 +0,0 @@
import fs from "node:fs/promises";
import path from "node:path";
import type { Book } from "./library";
import type { Browser } from "./browser";
import type { Database } from "./database";
import { DlsiteManiax } from "./platforms/dlsite-maniax";
import { DmmBooks } from "./platforms/dmm-books";
import { FanzaDoujin } from "./platforms/fanza-doujin";
import { GooglePlayBooks } from "./platforms/google-play-books";
export const platforms = {
"dlsite-maniax": DlsiteManiax,
"dmm-books": DmmBooks,
"fanza-doujin": FanzaDoujin,
"google-play-books": GooglePlayBooks,
};
export type TPlatform = keyof typeof platforms;
export function site(url: string): TPlatform {
for (const [platform, { siteUrl }] of Object.entries(platforms)) {
if (siteUrl(new URL(url))) return platform as TPlatform;
}
throw new Error(`Unsupported URL: ${url}`);
}
export function createPlatform(opts: {
platform: TPlatform;
db: Database;
browser: Browser;
}) {
if (!(opts.platform in platforms)) {
throw new Error(
`The value must be a platform type: ${[...Object.keys(platforms)].join(
", ",
)}.`,
);
}
const platform = platforms[opts.platform](opts.browser);
return {
...platform,
async download(dir: string, book: Book): Promise<void> {
await fs.mkdir(dir);
const files: Array<() => Promise<Blob>> = await platform.getFiles(book);
const digits = String(files.length).length;
function pad(n: string) {
return n.padStart(digits, "0");
}
const supportedTypes = {
"image/png": "png",
"image/jpeg": "jpg",
"application/zip": "zip",
"application/vnd.comicbook+zip": "cbz",
};
for (const [n, getBlob] of Object.entries(files)) {
const blob = await getBlob();
const extension = supportedTypes[blob.type];
if (!extension) {
throw new Error(
`It was ${blob.type}. The image must be a file of type: ${[
...Object.keys(supportedTypes),
].join(", ")}.`,
);
}
const buffer = Buffer.from(await blob.arrayBuffer());
await fs.writeFile(`${dir}/${pad(n)}.${extension}`, buffer);
}
process.stderr.write(`\n`);
},
async login() {
const ctx = await opts.browser.newContext();
const page = await ctx.newPage();
for (const loginEndpoint of platform.loginEndpoints) {
await page.goto(loginEndpoint);
await page.waitForURL(platform.loginSuccessUrl, { timeout: 0 });
}
await opts.browser.saveBrowserContext(opts.platform, ctx);
},
async logout() {
try {
const ctx = await opts.browser.loadBrowserContext(opts.platform);
const page = await ctx.newPage();
for (const logoutEndpoint of platform.logoutEndpoints) {
await page.goto(logoutEndpoint);
}
} catch (error) {
process.stderr.write(`Warning: ${(error as Error).message}\n`);
}
await opts.db.run(
`update platforms set secrets = 'null' where name = ?`,
opts.platform,
);
},
};
}

View file

@@ -1,104 +0,0 @@
import type { Book } from "../library";
import type { Browser, BrowserContext, ImageFile } from "../browser";
export function DlsiteManiax(browser: Browser) {
async function* getAllBooks(ctx: BrowserContext): AsyncGenerator<Book> {
const totalCountEndpoint = "https://play.dlsite.com/api/product_count";
const endpoint = "https://play.dlsite.com/api/purchases";
const pager = {
page: 1,
perPage: 50,
totalCount: Infinity,
};
const res = await ctx.request.get(totalCountEndpoint);
const body: {
user: number;
} = await res.json();
pager.totalCount = body.user;
while ((pager.page - 1) * pager.perPage <= pager.totalCount) {
const res = await ctx.request.get(`${endpoint}?page=${pager.page}`);
if (!res.ok()) {
throw new Error(`${res.status()} ${res.statusText()}`);
}
const body: {
limit: number;
works: Array<{
workno: number;
name: {
ja_JP: string;
};
maker: {
name: {
ja_JP: string;
};
};
}>;
} = await res.json();
for (const work of Object.values(body.works).flat()) {
yield {
id: NaN,
platform: "dlsite-maniax",
readerUrl: `https://play.dlsite.com/#/work/${work.workno}`,
title: work.name.ja_JP || "",
authors: [work.maker.name.ja_JP || ""],
};
process.stderr.write(".");
}
pager.page += 1;
pager.perPage = body.limit;
}
}
return {
async *pull(): AsyncGenerator<Book> {
const ctx = await browser.loadBrowserContext("dlsite-maniax");
yield* getAllBooks(ctx);
process.stderr.write(`\n`);
},
async getFiles(book: Book): Promise<Array<() => Promise<Blob>>> {
const ctx = await browser.loadBrowserContext("dlsite-maniax");
const page = await ctx.newPage();
await page.goto(book.readerUrl);
const [, workId] =
/^https:[/][/]play[.]dlsite[.]com[/]#[/]work[/]([^/]+)/.exec(
book.readerUrl,
) ?? [];
if (!workId) {
throw new Error(`workId is not included: ${book.readerUrl}`);
}
const url = `https://www.dlsite.com/home/download/=/product_id/${workId}.html`;
const imageFile = { url };
return [
async () => {
const blob = await browser.drawImage(page, imageFile);
process.stderr.write(".");
return blob;
},
];
},
loginEndpoints: ["https://www.dlsite.com/home/login"],
loginSuccessUrl: (url: URL) => url.origin === "https://www.dlsite.com",
logoutEndpoints: ["https://www.dlsite.com/home/logout"],
};
}
DlsiteManiax.siteUrl = (url: URL) =>
url.href.startsWith("https://play.dlsite.com/#/work/");

View file

@@ -1,214 +0,0 @@
import type { Book } from "../library";
import type { Browser, BrowserContext, ImageFile } from "../browser";
var NFBR: any;
export async function getImageFiles(): Promise<Array<ImageFile>> {
const params = new URLSearchParams(location.search);
const model = new NFBR.a6G.Model({
settings: new NFBR.Settings("NFBR.SettingData"),
viewerFontSize: NFBR.a0X.a3K,
viewerFontFace: NFBR.a0X.a3k,
viewerSpreadDouble: true,
viewerSpread: {},
});
const a6l = new NFBR.a6G.a6L(model);
const a2f = new NFBR.a2F();
const a5w = await a2f.a5W({
contentId: params.get(NFBR.a5q.Key.CONTENT_ID),
a6m: params.get(NFBR.a5q.Key.a6M),
preview:
params.get(NFBR.a5q.Key.LOOK_INSIDE) !== NFBR.a5q.LookInsideType.DISABLED,
previewType:
params.get(NFBR.a5q.Key.LOOK_INSIDE) ?? NFBR.a5q.LookInsideType.DISABLED,
contentType: a6l.getContentType(),
title: true,
});
const content = new NFBR.a6i.Content(a5w.url);
const a5n = new NFBR.a5n();
await a5n.a5s(content, "configuration", a6l);
const imageFiles: Array<ImageFile> = [];
for (const index of Object.keys(content.files)) {
const file = content.files[index];
const conf = content.configuration.contents[index];
const {
No,
DummyWidth,
DummyHeight,
Size: { Width, Height },
} = file.FileLinkInfo.PageLinkInfoList[0].Page;
const page = new NFBR.a6i.Page(
`${conf.file}/${No}.jpeg`,
index,
`${conf["original-file-path"]}#-acs-position-${file.PageToBookmark[0][0]}-${file.PageToBookmark[0][1]}`,
);
const w = [...`${conf.file}/${No}`]
.map((c) => c.charCodeAt(0))
.reduce((a, cc) => a + cc, 0);
const pattern = (w % NFBR.a0X.a3h) + 1;
const blocks = NFBR.a3E.a3f(
Width + DummyWidth,
Height + DummyHeight,
NFBR.a0X.a3g,
NFBR.a0X.a3G,
pattern,
);
const url = `${a5w.url}${page.url}`;
imageFiles.push({
url,
blocks,
width: Width,
height: Height,
});
}
return imageFiles;
}
export function DmmBooks(browser: Browser) {
async function* getSeriesBooks(
ctx: BrowserContext,
series: {
seriesId: string;
shopName: string;
title: string;
authors: Array<string>;
},
): AsyncGenerator<Book> {
const endpoint = `https://book.dmm.com/ajax/bff/contents/?shop_name=${series.shopName}&series_id=${series.seriesId}`;
const pager = {
page: 1,
perPage: 0,
totalCount: Infinity,
};
while ((pager.page - 1) * pager.perPage <= pager.totalCount) {
const res = await ctx.request.get(`${endpoint}&page=${pager.page}`);
if (!res.ok()) {
throw new Error(`${res.status()} ${res.statusText()}`);
}
const body: {
volume_books: Array<{
title: string;
purchased?: {
streaming_url: string;
};
}>;
pager: {
page: number;
per_page: number;
total_count: number;
};
} = await res.json();
for (const book of body.volume_books.filter((b) => b.purchased)) {
yield {
id: NaN,
platform: "dmm-books",
readerUrl: book.purchased?.streaming_url!,
title: book.title || series.title || "",
authors: series.authors,
};
process.stderr.write(".");
}
pager.page += 1;
pager.perPage = body.pager.per_page;
pager.totalCount = body.pager.total_count;
}
}
async function* getAllBooks(ctx: BrowserContext): AsyncGenerator<Book> {
const endpoint = "https://book.dmm.com/ajax/bff/library/?shop_name=all";
const pager = {
page: 1,
perPage: 0,
totalCount: Infinity,
};
while ((pager.page - 1) * pager.perPage <= pager.totalCount) {
const res = await ctx.request.get(`${endpoint}&page=${pager.page}`);
if (!res.ok()) {
throw new Error(`${res.status()} ${res.statusText()}`);
}
const body: {
series_books: Array<{
shop_name: string;
series_id: string;
title: string;
author: Array<string>;
}>;
pager: {
page: number;
per_page: number;
total_count: number;
};
} = await res.json();
for (const series of body.series_books) {
yield* getSeriesBooks(ctx, {
seriesId: series.series_id,
shopName: series.shop_name,
title: series.title,
authors: series.author,
});
}
pager.page += 1;
pager.perPage = body.pager.per_page;
pager.totalCount = body.pager.total_count;
}
}
return {
async *pull(): AsyncGenerator<Book> {
const ctx = await browser.loadBrowserContext("dmm-books");
yield* getAllBooks(ctx);
process.stderr.write(`\n`);
},
async getFiles(book: Book): Promise<Array<() => Promise<Blob>>> {
const ctx = await browser.loadBrowserContext("dmm-books");
const page = await ctx.newPage();
await page.goto(book.readerUrl);
const imageFiles = await page.evaluate(getImageFiles);
return imageFiles.map((imageFile) => async () => {
const blob = await browser.drawImage(page, imageFile);
process.stderr.write(".");
return blob;
});
},
loginEndpoints: [
"https://accounts.dmm.com/service/login/password",
"https://www.dmm.com/service/-/exchange",
],
loginSuccessUrl: (url: URL) =>
["https://www.dmm.com/", "https://www.dmm.co.jp/top/"].includes(url.href),
logoutEndpoints: [
"https://accounts.dmm.com/service/logout",
"https://accounts.dmm.co.jp/service/logout",
],
};
}
DmmBooks.siteUrl = (url: URL) =>
["https://book.dmm.com", "https://book.dmm.co.jp"].includes(url.origin);

View file

@@ -1,128 +0,0 @@
import type { Book } from "../library";
import type { Browser, BrowserContext, ImageFile } from "../browser";
import { getImageFiles } from "./dmm-books";
export function FanzaDoujin(browser: Browser) {
async function* getAllBooks(ctx: BrowserContext): AsyncGenerator<Book> {
const endpoint =
"https://www.dmm.co.jp/dc/doujin/api/mylibraries/?limit=20";
const pager = {
page: 1,
perPage: 20,
totalCount: Infinity,
};
while ((pager.page - 1) * pager.perPage <= pager.totalCount) {
const res = await ctx.request.get(`${endpoint}&page=${pager.page}`);
if (!res.ok()) {
throw new Error(`${res.status()} ${res.statusText()}`);
}
const body: {
data: {
items: Record<
string,
Array<{
productId: string;
title: string;
makerName: string;
}>
>;
total: number;
};
} = await res.json();
for (const item of Object.values(body.data.items).flat()) {
yield {
id: NaN,
platform: "fanza-doujin",
readerUrl: `https://www.dmm.co.jp/dc/-/mylibrary/detail/=/product_id=${item.productId}/`,
title: item.title || "",
authors: [item.makerName],
};
process.stderr.write(".");
}
pager.page += 1;
pager.totalCount = body.data.total;
}
}
return {
async *pull(): AsyncGenerator<Book> {
const ctx = await browser.loadBrowserContext("fanza-doujin");
yield* getAllBooks(ctx);
process.stderr.write(`\n`);
},
async getFiles(book: Book): Promise<Array<() => Promise<Blob>>> {
const ctx = await browser.loadBrowserContext("fanza-doujin");
const page = await ctx.newPage();
await page.goto(book.readerUrl);
const [, productId] = /product_id=([^/]*)/.exec(book.readerUrl) ?? [];
if (!productId) {
throw new Error(`product_id is not included: ${book.readerUrl}`);
}
const res = await ctx.request.get(
`https://www.dmm.co.jp/dc/doujin/api/mylibraries/details/${productId}/`,
);
if (!res.ok()) {
throw new Error(`${res.status()} ${res.statusText()}`);
}
const body: {
data: {
drm: {
dmmBooks: boolean;
softDenchi: boolean;
};
downloadLinks: Record<number, string>;
};
} = await res.json();
const imageFiles: Array<ImageFile> = [];
if (body.data.drm.dmmBooks) {
await page.waitForSelector(`li[class^="fileTreeItem"]`);
await page.click(`li[class^="fileTreeItem"]>a`);
await page.waitForURL((url) =>
url.href.startsWith(
"https://www.dmm.co.jp/dc/-/viewer/=/product_id=",
),
);
imageFiles.push(...(await page.evaluate(getImageFiles)));
} else {
for (const link of Object.values(body.data.downloadLinks)) {
const url = new URL(link, "https://www.dmm.co.jp/").href;
imageFiles.push({ url });
}
}
return imageFiles.map((imageFile) => async () => {
const blob = await browser.drawImage(page, imageFile);
process.stderr.write(".");
return blob;
});
},
loginEndpoints: ["https://accounts.dmm.co.jp/service/login/password"],
loginSuccessUrl: (url: URL) => url.href === "https://www.dmm.co.jp/top/",
logoutEndpoints: ["https://accounts.dmm.co.jp/service/logout"],
};
}
FanzaDoujin.siteUrl = (url: URL) =>
url.href.startsWith(
"https://www.dmm.co.jp/dc/-/mylibrary/detail/=/product_id=",
);

View file

@@ -1,136 +0,0 @@
import type { Book } from "../library";
import type { Browser, ImageFile } from "../browser";
async function getImageFiles(): Promise<Array<ImageFile>> {
const pages: NodeListOf<HTMLElement> = await new Promise(async function (
resolve,
reject,
) {
const timeout = setTimeout(() => {
reject(new Error("Page loading timeout."));
}, 60_000);
let pages: NodeListOf<HTMLElement>;
while (true) {
pages = document.querySelectorAll("reader-page");
const loaded =
pages.length > 0 &&
[...pages].every((page) => page.classList.contains("-gb-loaded"));
if (loaded) {
break;
} else {
await new Promise((resolve) => setTimeout(resolve, 100));
}
}
resolve(pages);
clearTimeout(timeout);
});
const images: Array<SVGImageElement> = [...pages].map(
(el) => el.querySelector("svg image")!,
);
return [...images].map((image) => ({ url: image.href.baseVal }));
}
export function GooglePlayBooks(browser: Browser) {
return {
async *pull(): AsyncGenerator<Book> {
const ctx = await browser.loadBrowserContext("google-play-books");
const page = await ctx.newPage();
await page.goto(
"https://play.google.com/books?type=comics&source=purchases",
);
await page.waitForSelector("gpb-library-card");
for (const metadata of await page.$$("gpb-library-card .metadata")) {
const readerUrl = await metadata.$eval("a", (a) => a.href);
const [title, author] = (await metadata.innerText()).split("\n");
yield {
id: NaN,
platform: "google-play-books",
readerUrl,
title,
authors: [author],
};
process.stderr.write(".");
}
process.stderr.write(`\n`);
},
async getFiles(book: Book): Promise<Array<() => Promise<Blob>>> {
const ctx = await browser.loadBrowserContext("google-play-books");
const page = await ctx.newPage();
await page.goto(book.readerUrl);
await page.waitForSelector(".display");
const frame = page.frames().at(-1);
if (!frame) {
throw new Error("Frame not found.");
}
await frame.evaluate(function scrollToTop() {
const viewport = document.querySelector("cdk-virtual-scroll-viewport");
viewport?.scroll({ top: 0 });
});
async function next(): Promise<boolean> {
return await frame!.evaluate(function scroll() {
const viewport = document.querySelector(
"cdk-virtual-scroll-viewport",
);
if (!viewport) throw new Error("Viewport not found.");
const hasNext =
1 <=
Math.abs(
viewport.scrollHeight -
viewport.clientHeight -
viewport.scrollTop,
);
if (hasNext) {
viewport.scrollBy({ top: viewport.clientHeight });
}
return hasNext;
});
}
const fileMap: Map<string, () => Promise<Blob>> = new Map();
while (await next()) {
const imageFiles = await frame.evaluate(getImageFiles);
for (const imageFile of imageFiles) {
if (fileMap.has(imageFile.url)) continue;
const blob = await browser.drawImage(frame, imageFile);
process.stderr.write(".");
fileMap.set(imageFile.url, async () => blob);
}
}
return [...fileMap.values()];
},
loginEndpoints: ["https://accounts.google.com"],
loginSuccessUrl: (url: URL) =>
url.origin === "https://myaccount.google.com",
logoutEndpoints: ["https://accounts.google.com/Logout"],
};
}
GooglePlayBooks.siteUrl = (url: URL) =>
url.origin === "https://play.google.com";