Compare commits

...

53 Commits

Author SHA1 Message Date
Jakub Jirutka 7c61b462dd disable unnecessary/unused regex features to reduce binary size
This will reduce the monolith binary size by ~15%.
2022-09-20 11:46:26 -04:00
Simone Mosciatti ef3684025b move to use http instead of https 2022-09-11 14:30:44 -04:00
Simone Mosciatti db7ee697b3 rewrite small part of the input argument handling
the commit rewrites a small part of the input argument handling, trying
to follow best Rust practices.
We get rid of a variable and of a mutable reference while keeping the
code a bit more concise.
2022-09-11 14:30:44 -04:00
Sunshine 89ce5029b9
add option to blacklist/whitelist domains 2022-09-01 13:35:52 -10:00
dependabot[bot] 54609b10e5
Bump iana-time-zone from 0.1.44 to 0.1.46 (#316)
Bumps [iana-time-zone](https://github.com/strawlab/iana-time-zone) from 0.1.44 to 0.1.46.
- [Release notes](https://github.com/strawlab/iana-time-zone/releases)
- [Changelog](https://github.com/strawlab/iana-time-zone/blob/main/CHANGELOG.md)
- [Commits](https://github.com/strawlab/iana-time-zone/compare/0.1.44...v0.1.46)

---
updated-dependencies:
- dependency-name: iana-time-zone
  dependency-type: indirect
...

Signed-off-by: dependabot[bot] <support@github.com>

Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2022-08-31 11:35:38 -10:00
Sunshine 013d93bacc
update 3rd-party dependencies and bump version number 2022-08-14 05:12:39 -10:00
Sunshine 0df8613789
Rewrite part of function retrieve_asset, include support for brotli and deflate (#312)
do not crash the app if reqwest throws, add support for deflate & brotli
2022-08-06 19:07:39 -10:00
Sunshine 68a1531a11
Update packages (#313)
update dependencies
2022-08-06 18:21:53 -10:00
Sunshine 99c3be1804
Merge pull request #308 from Y2Z/dependabot/cargo/tokio-1.16.1
Bump tokio from 1.12.0 to 1.16.1
2022-08-06 17:07:18 -10:00
Sunshine 80559e7224
Merge pull request #309 from Y2Z/dependabot/cargo/regex-1.5.5
Bump regex from 1.5.4 to 1.5.5
2022-08-06 16:56:18 -10:00
dependabot[bot] c5c5f1ca44
Bump regex from 1.5.4 to 1.5.5
Bumps [regex](https://github.com/rust-lang/regex) from 1.5.4 to 1.5.5.
- [Release notes](https://github.com/rust-lang/regex/releases)
- [Changelog](https://github.com/rust-lang/regex/blob/master/CHANGELOG.md)
- [Commits](https://github.com/rust-lang/regex/compare/1.5.4...1.5.5)

---
updated-dependencies:
- dependency-name: regex
  dependency-type: direct:production
...

Signed-off-by: dependabot[bot] <support@github.com>
2022-06-06 21:07:06 +00:00
dependabot[bot] de6a13a884
Bump tokio from 1.12.0 to 1.16.1
Bumps [tokio](https://github.com/tokio-rs/tokio) from 1.12.0 to 1.16.1.
- [Release notes](https://github.com/tokio-rs/tokio/releases)
- [Commits](https://github.com/tokio-rs/tokio/compare/tokio-1.12.0...tokio-1.16.1)

---
updated-dependencies:
- dependency-name: tokio
  dependency-type: indirect
...

Signed-off-by: dependabot[bot] <support@github.com>
2022-06-06 19:44:19 +00:00
Sunshine ef16355f9f
Merge pull request #303 from timoteostewart/master
fix typo 'non-standart' to 'non-standard'
2022-03-17 04:21:16 -04:00
Tim Stewart a4dc0ed9b4
fix typo 'non-standart' to 'non-standard' 2022-03-16 17:54:48 -05:00
Sunshine cd0e366979
Merge pull request #301 from liamwarfield/patch-1
Updated monk project link
2022-02-22 15:22:33 -10:00
Liam Warfield d4c6c458f9
Updated monk project link
The monk project has recently moved to Github! Just changing the link here to the new repo.
2022-02-22 14:17:40 -07:00
Sunshine c9970b3a8e
Merge pull request #292 from snshn/include-unsafe-eval-origin-for-isolated-documents
Include unsafe-eval origin for isolated documents
2021-12-05 20:26:44 -10:00
Sunshine 404d322b99
make tests pass for newly added 'unsafe-eval' origin addition 2021-12-05 20:16:37 -10:00
Sunshine 1b353d0b46
include unsafe-eval origin for isolated documents 2021-12-05 20:09:26 -10:00
Sunshine f920a5e4d6
Merge pull request #290 from matildepark/patch-1
README: remove duplicate macports instructions
2021-11-10 20:33:35 -10:00
matildepark d3ca1ecad3
README: remove duplicate macports instructions 2021-11-10 23:10:31 -05:00
Sunshine 9e057472c6
Update README.md 2021-10-20 16:21:55 -10:00
Sunshine d453145bf8
Merge pull request #288 from snshn/update-markdown-files
Update Markdown files
2021-10-20 15:54:07 -10:00
Sunshine 8c131d649f
update Markdown files 2021-10-20 15:46:08 -10:00
Sunshine a221fdb368
Merge pull request #287 from snshn/ci-ignore-some-files
Update README files and set CI to ignore irrelevant paths
2021-10-20 15:40:43 -10:00
Sunshine 15dd82e300
update README files, set CI to ignore irrelevant paths 2021-10-20 15:31:54 -10:00
Sunshine de492caaa5
Merge pull request #286 from snshn/move-test-data
Move test data files under _data_
2021-10-17 22:51:22 -10:00
Sunshine 9096447c70
move test data files under _data_ 2021-10-17 22:46:06 -10:00
Sunshine 354340db86
Merge pull request #285 from snshn/use-percent-encoding-crate
Offload percent decoding to percent-encoding crate
2021-10-17 22:32:10 -10:00
Sunshine 900dd8d163
offload percent decoding to percent-encoding crate 2021-10-17 22:26:11 -10:00
Sunshine a11c4496b0
Merge pull request #284 from snshn/move-tests-to-upper-level
Get rid of macros, move tests out of src
2021-10-16 21:39:53 -10:00
Sunshine dd33b16876
Merge pull request #283 from snshn/formatting
Format README.md and annotate workflows
2021-10-16 21:16:53 -10:00
Sunshine 2cc1870033
get rid of macros, move tests out of src 2021-10-16 21:16:37 -10:00
Sunshine d41e6c041b
format README.md and annotate workflows 2021-10-16 18:48:32 -10:00
Sunshine 460a461373
Update README.md 2021-07-14 00:09:41 -10:00
Sunshine 1e6e87b6aa
Merge pull request #277 from Oliver-Hanikel/master
Reduce size of Docker image
2021-07-11 11:45:18 -10:00
Oliver Hanikel 54094270b3 Update run-in-container.sh 2021-07-11 20:07:48 +02:00
Oliver Hanikel e6cf367e23 reduce size of docker image 2021-07-11 20:00:39 +02:00
Sunshine e8437ecb28
Update README.md 2021-07-10 16:41:30 -10:00
Sunshine 543bebbd8d
Merge pull request #275 from snshn/improve-readme-code-snippets
Remove dollar signs from code snippets
2021-07-10 16:40:20 -10:00
Sunshine dc6c0200bc
remove dollar sign from code snippets 2021-07-10 16:32:56 -10:00
Sunshine 04bdb3072f
Update README.md 2021-07-08 13:14:37 -10:00
Sunshine a9228f0522
Merge pull request #274 from snshn/arm64-cd-job
Downgrade AArch64 CD job from Ubuntu 20.04 to Ubuntu 18.04
2021-07-06 15:29:55 -10:00
Sunshine aae68c4c82
downgrade AArch64 CD job from Ubuntu 20.04 to Ubuntu 18.04 2021-07-06 14:41:56 -10:00
Sunshine dd23826205
Merge pull request #273 from herbygillot/patch-1
README: add MacPorts install instructions
2021-07-04 21:16:18 -10:00
Herby Gillot 781f4cd3b5
README: add MacPorts install instructions 2021-07-05 03:07:55 -04:00
Sunshine 6826b59ab9
Merge pull request #272 from snshn/new-release
New release (2.6.1)
2021-07-03 19:39:32 -10:00
Sunshine 2be725eeb5
bump version number (2.6.0 -> 2.6.1) 2021-07-03 19:33:09 -10:00
Sunshine dd2e9ca2e5
update crates 2021-07-03 19:31:55 -10:00
Sunshine 50bccae476
Merge pull request #267 from snshn/aarch64-binary
Add GNU/Linux AArch64 CD job
2021-07-03 00:15:04 -10:00
Sunshine b3bcb1d85b
add GNU/Linux AArch64 CD job 2021-07-03 00:10:14 -10:00
Sunshine c58d044459
Merge pull request #271 from snshn/fix-charset-detection-mechanism
Fix charset detection logic
2021-07-02 21:47:56 -10:00
Sunshine eeaea0df16
fix use of wrong charset 2021-07-02 21:35:06 -10:00
83 changed files with 1333 additions and 906 deletions

View File

@ -3,6 +3,17 @@ name: GNU/Linux
on:
push:
branches: [ master ]
paths-ignore:
- 'assets/'
- 'dist/'
- 'docs/'
- 'snap/'
- '.adr-dir'
- 'Dockerfile'
- 'LICENSE'
- 'Makefile'
- 'monolith.nuspec'
- 'README.md'
jobs:
build:
@ -17,6 +28,8 @@ jobs:
steps:
- run: git config --global core.autocrlf false
- uses: actions/checkout@v2
- name: Build
run: cargo build --all --locked --verbose

View File

@ -3,6 +3,17 @@ name: macOS
on:
push:
branches: [ master ]
paths-ignore:
- 'assets/'
- 'dist/'
- 'docs/'
- 'snap/'
- '.adr-dir'
- 'Dockerfile'
- 'LICENSE'
- 'Makefile'
- 'monolith.nuspec'
- 'README.md'
jobs:
build:
@ -17,6 +28,8 @@ jobs:
steps:
- run: git config --global core.autocrlf false
- uses: actions/checkout@v2
- name: Build
run: cargo build --all --locked --verbose

View File

@ -3,6 +3,17 @@ name: Windows
on:
push:
branches: [ master ]
paths-ignore:
- 'assets/'
- 'dist/'
- 'docs/'
- 'snap/'
- '.adr-dir'
- 'Dockerfile'
- 'LICENSE'
- 'Makefile'
- 'monolith.nuspec'
- 'README.md'
jobs:
build:
@ -17,6 +28,8 @@ jobs:
steps:
- run: git config --global core.autocrlf false
- uses: actions/checkout@v2
- name: Build
run: cargo build --all --locked --verbose

View File

@ -1,4 +1,4 @@
# CD GitHub Actions workflow for Monolith
# CD GitHub Actions workflow for monolith
name: CD
@ -13,10 +13,13 @@ jobs:
runs-on: windows-2019
steps:
- run: git config --global core.autocrlf false
- name: Checkout the repository
uses: actions/checkout@v2
- name: Build the executable
run: cargo build --release
- uses: Shopify/upload-to-release@1.0.0
with:
name: monolith.exe
@ -28,24 +31,27 @@ jobs:
steps:
- name: Checkout the repository
uses: actions/checkout@v2
- name: Prepare cross-platform environment
run: |
sudo mkdir -p /cross-build-arm
sudo mkdir /cross-build
sudo touch /etc/apt/sources.list.d/armhf.list
echo "deb [arch=armhf] http://ports.ubuntu.com/ubuntu-ports/ bionic main" | sudo tee -a /etc/apt/sources.list.d/armhf.list
sudo apt-get update
sudo apt-get install -y gcc-arm-linux-gnueabihf libc6-armhf-cross libc6-dev-armhf-cross
sudo apt-get download libssl1.1:armhf libssl-dev:armhf
sudo dpkg -x libssl1.1*.deb /cross-build-arm
sudo dpkg -x libssl-dev*.deb /cross-build-arm
sudo dpkg -x libssl1.1*.deb /cross-build
sudo dpkg -x libssl-dev*.deb /cross-build
rustup target add arm-unknown-linux-gnueabihf
echo "C_INCLUDE_PATH=/cross-build-arm/usr/include" >> $GITHUB_ENV
echo "OPENSSL_INCLUDE_DIR=/cross-build-arm/usr/include/arm-linux-gnueabihf" >> $GITHUB_ENV
echo "OPENSSL_LIB_DIR=/cross-build-arm/usr/lib/arm-linux-gnueabihf" >> $GITHUB_ENV
echo "C_INCLUDE_PATH=/cross-build/usr/include" >> $GITHUB_ENV
echo "OPENSSL_INCLUDE_DIR=/cross-build/usr/include/arm-linux-gnueabihf" >> $GITHUB_ENV
echo "OPENSSL_LIB_DIR=/cross-build/usr/lib/arm-linux-gnueabihf" >> $GITHUB_ENV
echo "PKG_CONFIG_ALLOW_CROSS=1" >> $GITHUB_ENV
echo "RUSTFLAGS=-C linker=arm-linux-gnueabihf-gcc -L/usr/arm-linux-gnueabihf/lib -L/cross-build-arm/usr/lib/arm-linux-gnueabihf -L/cross-build-arm/lib/arm-linux-gnueabihf" >> $GITHUB_ENV
echo "RUSTFLAGS=-C linker=arm-linux-gnueabihf-gcc -L/usr/arm-linux-gnueabihf/lib -L/cross-build/usr/lib/arm-linux-gnueabihf -L/cross-build/lib/arm-linux-gnueabihf" >> $GITHUB_ENV
- name: Build the executable
run: cargo build --release --target=arm-unknown-linux-gnueabihf
- name: Attach artifact to the release
uses: Shopify/upload-to-release@1.0.0
with:
@ -53,13 +59,48 @@ jobs:
path: target/arm-unknown-linux-gnueabihf/release/monolith
repo-token: ${{ secrets.GITHUB_TOKEN }}
gnu_linux_aarch64:
runs-on: ubuntu-18.04
steps:
- name: Checkout the repository
uses: actions/checkout@v2
- name: Prepare cross-platform environment
run: |
sudo mkdir /cross-build
sudo touch /etc/apt/sources.list.d/arm64.list
echo "deb [arch=arm64] http://ports.ubuntu.com/ubuntu-ports/ bionic main" | sudo tee -a /etc/apt/sources.list.d/arm64.list
sudo apt-get update
sudo apt-get install -y gcc-aarch64-linux-gnu libc6-arm64-cross libc6-dev-arm64-cross
sudo apt-get download libssl1.1:arm64 libssl-dev:arm64
sudo dpkg -x libssl1.1*.deb /cross-build
sudo dpkg -x libssl-dev*.deb /cross-build
rustup target add aarch64-unknown-linux-gnu
echo "C_INCLUDE_PATH=/cross-build/usr/include" >> $GITHUB_ENV
echo "OPENSSL_INCLUDE_DIR=/cross-build/usr/include/aarch64-linux-gnu" >> $GITHUB_ENV
echo "OPENSSL_LIB_DIR=/cross-build/usr/lib/aarch64-linux-gnu" >> $GITHUB_ENV
echo "PKG_CONFIG_ALLOW_CROSS=1" >> $GITHUB_ENV
echo "RUSTFLAGS=-C linker=aarch64-linux-gnu-gcc -L/usr/aarch64-linux-gnu/lib -L/cross-build/usr/lib/aarch64-linux-gnu" >> $GITHUB_ENV
- name: Build the executable
run: cargo build --release --target=aarch64-unknown-linux-gnu
- name: Attach artifact to the release
uses: Shopify/upload-to-release@1.0.0
with:
name: monolith-gnu-linux-aarch64
path: target/aarch64-unknown-linux-gnu/release/monolith
repo-token: ${{ secrets.GITHUB_TOKEN }}
gnu_linux_x86_64:
runs-on: ubuntu-18.04
steps:
- name: Checkout the repository
uses: actions/checkout@v2
- name: Build the executable
run: cargo build --release
- uses: Shopify/upload-to-release@1.0.0
with:
name: monolith-gnu-linux-x86_64

View File

@ -1,8 +1,21 @@
# CI GitHub Actions workflow for monolith
name: CI
on:
pull_request:
branches: [ master ]
paths-ignore:
- 'assets/'
- 'dist/'
- 'docs/'
- 'snap/'
- '.adr-dir'
- 'Dockerfile'
- 'LICENSE'
- 'Makefile'
- 'monolith.nuspec'
- 'README.md'
jobs:
build_and_test:
@ -21,11 +34,15 @@ jobs:
steps:
- run: git config --global core.autocrlf false
- uses: actions/checkout@v2
- name: Build
run: cargo build --all --locked --verbose
- name: Run tests
run: cargo test --all --locked --verbose
- name: Check code formatting
run: |
rustup component add rustfmt

910
Cargo.lock generated

File diff suppressed because it is too large Load Diff

View File

@ -1,6 +1,6 @@
[package]
name = "monolith"
version = "2.6.0"
version = "2.6.2"
authors = [
"Sunshine <sunshine@uberspace.net>",
"Mahdi Robatipoor <mahdi.robatipoor@gmail.com>",
@ -23,20 +23,26 @@ license = "CC0-1.0"
[dependencies]
atty = "0.2.14" # Used for highlighting network errors
base64 = "0.13.0"
chrono = "0.4.19" # Used for formatting creation timestamp
clap = "2.33.3"
cssparser = "0.28.1"
encoding_rs = "0.8.28"
base64 = "0.13.0" # Used for integrity attributes
chrono = "0.4.20" # Used for formatting creation timestamp
clap = "3.2.16"
cssparser = "0.29.6"
encoding_rs = "0.8.31"
html5ever = "0.24.1"
regex = "1.5.4" # Used for parsing srcset and NOSCRIPT
sha2 = "0.9.5" # Used for calculating checksums during integrity checks
percent-encoding = "2.1.0"
sha2 = "0.10.2" # Used for calculating checksums during integrity checks
url = "2.2.2"
[dependencies.reqwest]
version = "0.11.3"
# Used for parsing srcset and NOSCRIPT
[dependencies.regex]
version = "1.6.0"
default-features = false
features = ["default-tls", "blocking", "gzip"]
features = ["std", "perf-dfa", "unicode-perl"]
[dependencies.reqwest]
version = "0.11.11"
default-features = false
features = ["default-tls", "blocking", "gzip", "brotli", "deflate"]
[dev-dependencies]
assert_cmd = "1.0.5"
assert_cmd = "2.0.4"

View File

@ -1,18 +1,22 @@
FROM rust
WORKDIR /usr/local/src/
RUN curl -s https://api.github.com/repos/y2z/monolith/releases/latest \
| grep "tarball_url.*\"," \
| cut -d '"' -f 4 \
| wget -qi - -O monolith.tar.gz
FROM ekidd/rust-musl-builder as builder
RUN curl -L -o monolith.tar.gz $(curl -s https://api.github.com/repos/y2z/monolith/releases/latest \
| grep "tarball_url.*\"," \
| cut -d '"' -f 4)
RUN tar xfz monolith.tar.gz \
&& mv Y2Z-monolith-* monolith \
&& rm monolith.tar.gz
WORKDIR /usr/local/src/monolith
RUN ls -a
WORKDIR monolith/
RUN make install
FROM alpine
RUN apk update && \
apk add --no-cache openssl && \
rm -rf "/var/cache/apk/*"
COPY --from=builder /home/rust/.cargo/bin/monolith /usr/bin/monolith
WORKDIR /tmp
CMD ["/usr/local/cargo/bin/monolith"]
ENTRYPOINT ["/usr/bin/monolith"]

106
README.md
View File

@ -1,6 +1,6 @@
[![Monolith Build Status for GNU/Linux](https://github.com/Y2Z/monolith/workflows/GNU%2FLinux/badge.svg)](https://github.com/Y2Z/monolith/actions?query=workflow%3AGNU%2FLinux)
[![Monolith Build Status for macOS](https://github.com/Y2Z/monolith/workflows/macOS/badge.svg)](https://github.com/Y2Z/monolith/actions?query=workflow%3AmacOS)
[![Monolith Build Status for Windows](https://github.com/Y2Z/monolith/workflows/Windows/badge.svg)](https://github.com/Y2Z/monolith/actions?query=workflow%3AWindows)
[![monolith build status on GNU/Linux](https://github.com/Y2Z/monolith/workflows/GNU%2FLinux/badge.svg)](https://github.com/Y2Z/monolith/actions?query=workflow%3AGNU%2FLinux)
[![monolith build status on macOS](https://github.com/Y2Z/monolith/workflows/macOS/badge.svg)](https://github.com/Y2Z/monolith/actions?query=workflow%3AmacOS)
[![monolith build status on Windows](https://github.com/Y2Z/monolith/workflows/Windows/badge.svg)](https://github.com/Y2Z/monolith/actions?query=workflow%3AWindows)
```
_____ ______________ __________ ___________________ ___
@ -18,61 +18,104 @@ Unlike the conventional “Save page as”, `monolith` not only saves the target
If compared to saving websites with `wget -mpk`, this tool embeds all assets as data URLs and therefore lets browsers render the saved page exactly the way it was on the Internet, even when no network connection is available.
---------------------------------------------------
## Installation
#### Using [Cargo](https://crates.io/crates/monolith)
$ cargo install monolith
```console
cargo install monolith
```
#### Via [Homebrew](https://formulae.brew.sh/formula/monolith) (macOS and GNU/Linux)
$ brew install monolith
```console
brew install monolith
```
#### Via [MacPorts](https://ports.macports.org/port/monolith/summary) (macOS)
```console
sudo port install monolith
```
#### Using [Snapcraft](https://snapcraft.io/monolith) (GNU/Linux)
$ snap install monolith
```console
snap install monolith
```
#### Using [FreeBSD packages](https://svnweb.freebsd.org/ports/head/www/monolith/) (FreeBSD)
$ pkg install monolith
```console
pkg install monolith
```
#### Using [FreeBSD ports](https://www.freshports.org/www/monolith/) (FreeBSD)
$ cd /usr/ports/www/monolith/
$ make install clean
```console
cd /usr/ports/www/monolith/
make install clean
```
#### Using [pkgsrc](https://pkgsrc.se/www/monolith) (NetBSD, OpenBSD, Haiku, etc)
$ cd /usr/pkgsrc/www/monolith
$ make install clean
#### Using [pre-built binaries](https://github.com/Y2Z/monolith/releases) (Windows, ARM-based devices, etc)
Every release contains pre-built binaries for Windows, GNU/Linux, as well as platforms with non-standart CPU architecture.
```console
cd /usr/pkgsrc/www/monolith
make install clean
```
#### Using [containers](https://www.docker.com/)
$ docker build -t Y2Z/monolith .
$ sudo install -b utils/run-in-container.sh /usr/local/bin/monolith
#### From source
```console
docker build -t Y2Z/monolith .
sudo install -b dist/run-in-container.sh /usr/local/bin/monolith
```
Dependency: `libssl-dev`
#### From [source](https://github.com/Y2Z/monolith)
Dependency: `libssl`
```console
git clone https://github.com/Y2Z/monolith.git
cd monolith
make install
```
#### Using [pre-built binaries](https://github.com/Y2Z/monolith/releases) (Windows, ARM-based devices, etc)
Every release contains pre-built binaries for Windows, GNU/Linux, as well as platforms with non-standard CPU architecture.
$ git clone https://github.com/Y2Z/monolith.git
$ cd monolith
$ make install
---------------------------------------------------
## Usage
$ monolith https://lyrics.github.io/db/P/Portishead/Dummy/Roads/ -o portishead-roads-lyrics.html
or
$ cat index.html | monolith -aIiFfcMv -b https://original.site/ - > result.html
```console
monolith https://lyrics.github.io/db/P/Portishead/Dummy/Roads/ -o portishead-roads-lyrics.html
```
```console
cat index.html | monolith -aIiFfcMv -b https://original.site/ - > result.html
```
---------------------------------------------------
## Options
- `-a`: Exclude audio sources
- `-b`: Use custom `base URL`
- `-c`: Exclude CSS
- `-C`: Save document using custom `charset`
- `-d`: Allow retrieving assets only from specified `domain(s)`
- `-e`: Ignore network errors
- `-E`: Avoid retrieving assets located within specified domains
- `-f`: Omit frames
- `-F`: Exclude web fonts
- `-i`: Remove images
@ -81,39 +124,52 @@ or
- `-k`: Accept invalid X.509 (TLS) certificates
- `-M`: Don't add timestamp and URL information
- `-n`: Extract contents of NOSCRIPT elements
- `-o`: Write output to `file`, use “-” for STDOUT
- `-o`: Write output to `file` (use “-” for STDOUT)
- `-s`: Be quiet
- `-t`: Adjust `network request timeout`
- `-u`: Provide custom `User-Agent`
- `-v`: Exclude videos
---------------------------------------------------
## Proxies
Please set `https_proxy`, `http_proxy`, and `no_proxy` environment variables.
---------------------------------------------------
## Contributing
Please open an issue if something is wrong, that helps make this project better.
---------------------------------------------------
## Related projects
- Monolith Chrome Extension: https://github.com/rhysd/monolith-of-web
- Pagesaver: https://github.com/distributed-mind/pagesaver
- Personal WayBack Machine: https://github.com/popey/pwbm
- Hako: https://github.com/dmpop/hako
- Monk: https://gitlab.com/fisherdarling/monk
- Monk: https://github.com/monk-dev/monk
---------------------------------------------------
## License
To the extent possible under law, the author(s) have dedicated all copyright related and neighboring rights to this software to the public domain worldwide.
This software is distributed without any warranty.
---------------------------------------------------
<!-- Microtext -->
<sub>Keep in mind that `monolith` is not aware of your browser's session</sub>

View File

@ -7,4 +7,4 @@ if which podman 2>&1 > /dev/null; then
DOCKER=podman
fi
$DOCKER run --rm Y2Z/$PROG_NAME $PROG_NAME "$@"
$DOCKER run --rm Y2Z/$PROG_NAME "$@"

View File

@ -1,10 +1,23 @@
# Web apps that can be saved with Monolith
These apps retain most or all of their functionality when saved with Monolith
These apps retain all or most of their functionality when saved with Monolith:
|Converse|https://conversejs.org|
|:--|:--|
|Description|An XMPP client built using web technologies|
|Functionality retained|**full**|
|Command to use|`monolith https://conversejs.org/fullscreen.html > conversejs.html`|
|Monolith version used|2.2.7|
## Converse
| Website | https://conversejs.org |
|:-----------------------|:--------------------------------------------------------------------|
| Description | An XMPP client built using web technologies |
| Functionality retained | **full** |
| Command to use | `monolith https://conversejs.org/fullscreen.html > conversejs.html` |
| Monolith version used | 2.2.7 |
## Markdown Tables generator
| Website | https://www.tablesgenerator.com |
|:--------------------------|:-----------------------------------------------------------------------------------------------|
| Description | Tool for creating tables in extended Markdown format |
| Functionality retained | **full** |
| Command to use | `monolith -I https://www.tablesgenerator.com/markdown_tables -o markdown-table-generator.html` |
| Monolith version used | 2.6.1 |

View File

@ -6,7 +6,7 @@ use std::collections::HashMap;
use url::Url;
use crate::opts::Options;
use crate::url::{create_data_url, resolve_url};
use crate::url::{create_data_url, resolve_url, EMPTY_IMAGE_DATA_URL};
use crate::utils::retrieve_asset;
const CSS_PROPS_WITH_IMAGE_URLS: &[&str] = &[
@ -56,14 +56,14 @@ pub fn embed_css(
}
pub fn format_ident(ident: &str) -> String {
let mut res: String = String::new();
let mut res: String = "".to_string();
let _ = serialize_identifier(ident, &mut res);
res = res.trim_end().to_string();
res
}
pub fn format_quoted_string(string: &str) -> String {
let mut res: String = String::new();
let mut res: String = "".to_string();
let _ = serialize_string(string, &mut res);
res
}
@ -86,10 +86,10 @@ pub fn process_css<'a>(
prop_name: &str,
func_name: &str,
) -> Result<String, ParseError<'a, String>> {
let mut result: String = str!();
let mut result: String = "".to_string();
let mut curr_rule: String = str!(rule_name.clone());
let mut curr_prop: String = str!(prop_name.clone());
let mut curr_rule: String = rule_name.clone().to_string();
let mut curr_prop: String = prop_name.clone().to_string();
let mut token: &Token;
let mut token_offset: SourcePosition;
@ -105,7 +105,7 @@ pub fn process_css<'a>(
match *token {
Token::Comment(_) => {
let token_slice = parser.slice_from(token_offset);
result.push_str(str!(token_slice).as_str());
result.push_str(token_slice);
}
Token::Semicolon => result.push_str(";"),
Token::Colon => result.push_str(":"),
@ -161,13 +161,13 @@ pub fn process_css<'a>(
}
// div...
Token::Ident(ref value) => {
curr_rule = str!();
curr_prop = str!(value);
curr_rule = "".to_string();
curr_prop = value.to_string();
result.push_str(&format_ident(value));
}
// @import, @font-face, @charset, @media...
Token::AtKeyword(ref value) => {
curr_rule = str!(value);
curr_rule = value.to_string();
if options.no_fonts && curr_rule == "font-face" {
continue;
}
@ -181,7 +181,7 @@ pub fn process_css<'a>(
Token::QuotedString(ref value) => {
if curr_rule == "import" {
// Reset current at-rule value
curr_rule = str!();
curr_rule = "".to_string();
// Skip empty import values
if value.len() == 0 {
@ -242,7 +242,7 @@ pub fn process_css<'a>(
}
if options.no_images && is_image_url_prop(curr_prop.as_str()) {
result.push_str(format_quoted_string(empty_image!()).as_str());
result.push_str(format_quoted_string(EMPTY_IMAGE_DATA_URL).as_str());
} else {
let resolved_url: Url = resolve_url(&document_url, value);
match retrieve_asset(
@ -297,7 +297,7 @@ pub fn process_css<'a>(
if *has_sign && *unit_value >= 0. {
result.push_str("+");
}
result.push_str(str!(unit_value * 100.0).as_str());
result.push_str(&(unit_value * 100.0).to_string());
result.push_str("%");
}
Token::Dimension {
@ -309,12 +309,12 @@ pub fn process_css<'a>(
if *has_sign && *value >= 0. {
result.push_str("+");
}
result.push_str(str!(value).as_str());
result.push_str(str!(unit).as_str());
result.push_str(&value.to_string());
result.push_str(&unit.to_string());
}
// #selector, #id...
Token::IDHash(ref value) => {
curr_rule = str!();
curr_rule = "".to_string();
result.push_str("#");
result.push_str(&format_ident(value));
}
@ -324,7 +324,7 @@ pub fn process_css<'a>(
if is_import {
// Reset current at-rule value
curr_rule = str!();
curr_rule = "".to_string();
}
// Skip empty url()'s
@ -377,7 +377,7 @@ pub fn process_css<'a>(
}
} else {
if is_image_url_prop(curr_prop.as_str()) && options.no_images {
result.push_str(format_quoted_string(empty_image!()).as_str());
result.push_str(format_quoted_string(EMPTY_IMAGE_DATA_URL).as_str());
} else {
let full_url: Url = resolve_url(&document_url, value);
match retrieve_asset(

View File

@ -18,7 +18,9 @@ use std::default::Default;
use crate::css::embed_css;
use crate::js::attr_is_event_handler;
use crate::opts::Options;
use crate::url::{clean_url, create_data_url, is_url_and_has_protocol, resolve_url};
use crate::url::{
clean_url, create_data_url, is_url_and_has_protocol, resolve_url, EMPTY_IMAGE_DATA_URL,
};
use crate::utils::{parse_content_type, retrieve_asset};
struct SrcSetItem<'a> {
@ -81,7 +83,7 @@ pub fn compose_csp(options: &Options) -> String {
let mut string_list = vec![];
if options.isolate {
string_list.push("default-src 'unsafe-inline' data:;");
string_list.push("default-src 'unsafe-eval' 'unsafe-inline' data:;");
}
if options.no_css {
@ -173,11 +175,11 @@ pub fn embed_srcset(
}
}
let mut result: String = str!();
let mut result: String = "".to_string();
let mut i: usize = array.len();
for part in array {
if options.no_images {
result.push_str(empty_image!());
result.push_str(EMPTY_IMAGE_DATA_URL);
} else {
let image_full_url: Url = resolve_url(&document_url, part.path);
match retrieve_asset(
@ -205,7 +207,7 @@ pub fn embed_srcset(
result.push_str(image_full_url.as_ref());
} else {
// Avoid breaking the structure in case if not an HTTP(S) URL
result.push_str(empty_image!());
result.push_str(EMPTY_IMAGE_DATA_URL);
}
}
}
@ -342,7 +344,7 @@ pub fn get_node_attr(node: &Handle, attr_name: &str) -> Option<String> {
NodeData::Element { ref attrs, .. } => {
for attr in attrs.borrow().iter() {
if &*attr.name.local == attr_name {
return Some(str!(&*attr.value));
return Some(attr.value.to_string());
}
}
None
@ -502,8 +504,8 @@ pub fn set_node_attr(node: &Handle, attr_name: &str, attr_value: Option<String>)
found_existing_attr = true;
if let Some(attr_value) = attr_value.clone() {
&attrs_mut[i].value.clear();
&attrs_mut[i].value.push_slice(&attr_value.as_str());
let _ = &attrs_mut[i].value.clear();
let _ = &attrs_mut[i].value.push_slice(&attr_value.as_str());
} else {
// Remove attr completely if attr_value is not defined
attrs_mut.remove(i);
@ -827,10 +829,10 @@ pub fn walk_and_embed_assets(
if options.no_images {
// Put empty images into src and data-src attributes
if img_attr_src_value != None {
set_node_attr(node, "src", Some(str!(empty_image!())));
set_node_attr(node, "src", Some(EMPTY_IMAGE_DATA_URL.to_string()));
}
if img_attr_data_src_value != None {
set_node_attr(node, "data-src", Some(str!(empty_image!())));
set_node_attr(node, "data-src", Some(EMPTY_IMAGE_DATA_URL.to_string()));
}
} else {
if img_attr_src_value.clone().unwrap_or_default().is_empty()
@ -840,7 +842,7 @@ pub fn walk_and_embed_assets(
.is_empty()
{
// Add empty src attribute
set_node_attr(node, "src", Some(str!()));
set_node_attr(node, "src", Some("".to_string()));
} else {
// Add data URL src attribute
let img_full_url: String = if !img_attr_data_src_value
@ -891,11 +893,11 @@ pub fn walk_and_embed_assets(
if let Some(input_attr_src_value) = get_node_attr(node, "src") {
if options.no_images || input_attr_src_value.is_empty() {
let value = if input_attr_src_value.is_empty() {
str!()
""
} else {
str!(empty_image!())
EMPTY_IMAGE_DATA_URL
};
set_node_attr(node, "src", Some(value));
set_node_attr(node, "src", Some(value.to_string()));
} else {
retrieve_and_embed_asset(
cache,
@ -913,7 +915,7 @@ pub fn walk_and_embed_assets(
}
}
"image" => {
let mut image_href: String = str!();
let mut image_href: String = "".to_string();
if let Some(image_attr_href_value) = get_node_attr(node, "href") {
image_href = image_attr_href_value;
@ -984,7 +986,11 @@ pub fn walk_and_embed_assets(
if parent_node_name == "picture" {
if !source_attr_srcset_value.is_empty() {
if options.no_images {
set_node_attr(node, "srcset", Some(str!(empty_image!())));
set_node_attr(
node,
"srcset",
Some(EMPTY_IMAGE_DATA_URL.to_string()),
);
} else {
let resolved_srcset: String = embed_srcset(
cache,
@ -1009,7 +1015,7 @@ pub fn walk_and_embed_assets(
{
if options.no_js {
// Replace with empty JS call to preserve original behavior
set_node_attr(node, "href", Some(str!("javascript:;")));
set_node_attr(node, "href", Some("javascript:;".to_string()));
}
} else {
// Don't touch mailto: links or hrefs which begin with a hash sign
@ -1083,7 +1089,7 @@ pub fn walk_and_embed_assets(
if let Some(frame_attr_src_value) = get_node_attr(node, "src") {
if options.no_frames {
// Empty the src attribute
set_node_attr(node, "src", Some(str!()));
set_node_attr(node, "src", Some("".to_string()));
} else {
// Ignore (i)frames with empty source (they cause infinite loops)
if !frame_attr_src_value.trim().is_empty() {
@ -1144,7 +1150,11 @@ pub fn walk_and_embed_assets(
// Skip posters with empty source
if !video_attr_poster_value.is_empty() {
if options.no_images {
set_node_attr(node, "poster", Some(str!(empty_image!())));
set_node_attr(
node,
"poster",
Some(EMPTY_IMAGE_DATA_URL.to_string()),
);
} else {
retrieve_and_embed_asset(
cache,
@ -1167,8 +1177,10 @@ pub fn walk_and_embed_assets(
// Get contents of NOSCRIPT node
let mut noscript_contents = contents.borrow_mut();
// Parse contents of NOSCRIPT node as DOM
let noscript_contents_dom: RcDom =
html_to_dom(&noscript_contents.as_bytes().to_vec(), str!());
let noscript_contents_dom: RcDom = html_to_dom(
&noscript_contents.as_bytes().to_vec(),
"".to_string(),
);
// Embed assets of NOSCRIPT node contents
walk_and_embed_assets(
cache,

View File

@ -1,15 +1,6 @@
#[macro_use]
extern crate clap;
#[macro_use]
mod macros;
pub mod css;
pub mod html;
pub mod js;
pub mod opts;
pub mod url;
pub mod utils;
#[cfg(test)]
pub mod tests;

View File

@ -1,17 +0,0 @@
#[macro_export]
/// Shorthand for building owned `String`s.
///
/// `str!()` expands to an empty `String`; `str!(expr)` stringifies `expr`
/// through `ToString::to_string(&expr)` — the argument is taken by
/// reference, so the original value is not moved or consumed.
macro_rules! str {
() => {
String::new()
};
($val: expr) => {
ToString::to_string(&$val)
};
}
#[macro_export]
/// Expands to a `data:` URL containing a tiny transparent placeholder PNG.
/// Used to replace image sources (e.g. `src`, `srcset`, `poster`) when the
/// user asks for images to be stripped.
/// NOTE: the base64 payload must stay byte-identical — tests compare
/// output documents against this exact literal.
macro_rules! empty_image {
() => {
"data:image/png;base64,\
iVBORw0KGgoAAAANSUhEUgAAAA0AAAANCAQAAADY4iz3AAAAEUlEQVR42mNkwAkYR6UolgIACvgADsuK6xYAAAAASUVORK5CYII="
};
}

View File

@ -18,8 +18,6 @@ use monolith::opts::Options;
use monolith::url::{create_data_url, resolve_url};
use monolith::utils::retrieve_asset;
mod macros;
enum Output {
Stdout(io::Stdout),
File(fs::File),
@ -67,10 +65,9 @@ pub fn read_stdin() -> Vec<u8> {
fn main() {
let options = Options::from_args();
let mut target: String = str!(&options.target.clone());
// Check if target was provided
if target.len() == 0 {
if options.target.len() == 0 {
if !options.silent {
eprintln!("No target specified");
}
@ -85,66 +82,62 @@ fn main() {
}
}
let target_url: Url;
let mut use_stdin: bool = false;
// Determine exact target URL
if target.clone() == "-" {
// Read from pipe (stdin)
use_stdin = true;
// Set default target URL to an empty data URL; the user can set it via --base-url
target_url = Url::parse("data:text/html,").unwrap();
} else {
match Url::parse(&target.clone()) {
Ok(parsed_url) => {
if parsed_url.scheme() == "data"
|| parsed_url.scheme() == "file"
|| (parsed_url.scheme() == "http" || parsed_url.scheme() == "https")
{
target_url = parsed_url;
} else {
if !options.silent {
eprintln!("Unsupported target URL type: {}", &parsed_url.scheme());
}
process::exit(1);
}
}
Err(_err) => {
// Failed to parse given base URL,
// perhaps it's a filesystem path?
let path: &Path = Path::new(&target);
if path.exists() {
if path.is_file() {
match Url::from_file_path(fs::canonicalize(&path).unwrap()) {
Ok(file_url) => {
target_url = file_url;
}
Err(_err) => {
if !options.silent {
eprintln!(
"Could not generate file URL out of given path: {}",
"err"
);
}
process::exit(1);
}
}
} else {
if !options.silent {
eprintln!("Local target is not a file: {}", &options.target);
}
process::exit(1);
}
} else {
// Last chance, now we do what browsers do:
// prepend "http://" and hope it points to a website
target.insert_str(0, "http://");
target_url = Url::parse(&target).unwrap();
}
}
let target_url = match options.target.as_str() {
"-" => {
// Read from pipe (stdin)
use_stdin = true;
// Set default target URL to an empty data URL; the user can set it via --base-url
Url::parse("data:text/html,").unwrap()
}
}
target => match Url::parse(&target) {
Ok(url) => match url.scheme() {
"data" | "file" | "http" | "https" => url,
unsupported_scheme => {
if !options.silent {
eprintln!("Unsupported target URL type: {}", unsupported_scheme);
}
process::exit(1)
}
},
Err(_) => {
// Failed to parse given base URL (perhaps it's a filesystem path?)
let path: &Path = Path::new(&target);
match path.exists() {
true => match path.is_file() {
true => {
let canonical_path = fs::canonicalize(&path).unwrap();
match Url::from_file_path(canonical_path) {
Ok(url) => url,
Err(_) => {
if !options.silent {
eprintln!(
"Could not generate file URL out of given path: {}",
&target
);
}
process::exit(1);
}
}
}
false => {
if !options.silent {
eprintln!("Local target is not a file: {}", &target);
}
process::exit(1);
}
},
false => {
// It is not a FS path, now we do what browsers do:
// prepend "http://" and hope it points to a website
Url::parse(&format!("http://{hopefully_url}", hopefully_url = &target))
.unwrap()
}
}
}
},
};
// Initialize client
let mut cache = HashMap::new();
@ -170,7 +163,7 @@ fn main() {
let mut base_url: Url = target_url.clone();
let data: Vec<u8>;
let mut document_encoding: String = str!();
let mut document_encoding: String = "".to_string();
let mut dom: RcDom;
// Retrieve target document
@ -190,7 +183,12 @@ fn main() {
process::exit(1);
}
if options.base_url.clone().unwrap_or(str!()).is_empty() {
if options
.base_url
.clone()
.unwrap_or("".to_string())
.is_empty()
{
base_url = final_url;
}
@ -219,17 +217,14 @@ fn main() {
if !html_charset.is_empty() {
// Check if the charset specified inside HTML is valid
if let Some(encoding) = Encoding::for_label_no_replacement(html_charset.as_bytes()) {
// No point in parsing HTML again with the same encoding as before
if encoding.name() != "UTF-8" {
document_encoding = html_charset;
dom = html_to_dom(&data, document_encoding.clone());
}
document_encoding = html_charset;
dom = html_to_dom(&data, encoding.name().to_string());
}
}
}
// Use custom base URL if specified, read and use what's in the DOM otherwise
let custom_base_url: String = options.base_url.clone().unwrap_or(str!());
let custom_base_url: String = options.base_url.clone().unwrap_or("".to_string());
if custom_base_url.is_empty() {
// No custom base URL is specified
// Try to see if document has BASE element

View File

@ -1,4 +1,4 @@
use clap::{App, Arg};
use clap::{App, Arg, ArgAction};
use std::env;
#[derive(Default)]
@ -7,7 +7,9 @@ pub struct Options {
pub base_url: Option<String>,
pub no_css: bool,
pub charset: Option<String>,
pub domains: Option<Vec<String>>,
pub ignore_errors: bool,
pub exclude_domains: bool,
pub no_frames: bool,
pub no_fonts: bool,
pub no_images: bool,
@ -43,14 +45,24 @@ const ENV_VAR_TERM: &str = "TERM";
impl Options {
pub fn from_args() -> Options {
let app = App::new(env!("CARGO_PKG_NAME"))
.version(crate_version!())
.author(format!("\n{}", crate_authors!("\n")).as_str())
.about(format!("{}\n{}", ASCII, crate_description!()).as_str())
.version(env!("CARGO_PKG_VERSION"))
.author(format!("\n{}\n\n", env!("CARGO_PKG_AUTHORS").replace(':', "\n")).as_str())
.about(format!("{}\n{}", ASCII, env!("CARGO_PKG_DESCRIPTION")).as_str())
.args_from_usage("-a, --no-audio 'Removes audio sources'")
.args_from_usage("-b, --base-url=[http://localhost/] 'Sets custom base URL'")
.args_from_usage("-c, --no-css 'Removes CSS'")
.args_from_usage("-C, --charset=[UTF-8] 'Enforces custom encoding'")
.arg(
Arg::with_name("domains")
.short('d')
.long("domains")
.takes_value(true)
.value_name("DOMAINS")
.action(ArgAction::Append)
.help("Whitelist of domains"),
)
.args_from_usage("-e, --ignore-errors 'Ignore network errors'")
.args_from_usage("-E, --exclude-domains 'Treat specified domains as blacklist'")
.args_from_usage("-f, --no-frames 'Removes frames and iframes'")
.args_from_usage("-F, --no-fonts 'Removes fonts'")
.args_from_usage("-i, --no-images 'Removes images'")
@ -85,13 +97,18 @@ impl Options {
.to_string();
options.no_audio = app.is_present("no-audio");
if let Some(base_url) = app.value_of("base-url") {
options.base_url = Some(str!(base_url));
options.base_url = Some(base_url.to_string());
}
options.no_css = app.is_present("no-css");
if let Some(charset) = app.value_of("charset") {
options.charset = Some(str!(charset));
options.charset = Some(charset.to_string());
}
if let Some(domains) = app.get_many::<String>("domains") {
let list_of_domains: Vec<String> = domains.map(|v| v.clone()).collect::<Vec<_>>();
options.domains = Some(list_of_domains);
}
options.ignore_errors = app.is_present("ignore-errors");
options.exclude_domains = app.is_present("exclude-domains");
options.no_frames = app.is_present("no-frames");
options.no_fonts = app.is_present("no-fonts");
options.no_images = app.is_present("no-images");
@ -107,7 +124,7 @@ impl Options {
.parse::<u64>()
.unwrap();
if let Some(user_agent) = app.value_of("user-agent") {
options.user_agent = Some(str!(user_agent));
options.user_agent = Some(user_agent.to_string());
} else {
options.user_agent = Some(DEFAULT_USER_AGENT.to_string());
}

View File

@ -1,39 +0,0 @@
// ██████╗ █████╗ ███████╗███████╗██╗███╗ ██╗ ██████╗
// ██╔══██╗██╔══██╗██╔════╝██╔════╝██║████╗ ██║██╔════╝
// ██████╔╝███████║███████╗███████╗██║██╔██╗ ██║██║ ███╗
// ██╔═══╝ ██╔══██║╚════██║╚════██║██║██║╚██╗██║██║ ██║
// ██║ ██║ ██║███████║███████║██║██║ ╚████║╚██████╔╝
// ╚═╝ ╚═╝ ╚═╝╚══════╝╚══════╝╚═╝╚═╝ ╚═══╝ ╚═════╝
#[cfg(test)]
mod passing {
use crate::url;
// Percent-decoding must correctly reassemble multi-byte UTF-8 sequences.
#[test]
fn decode_unicode_characters() {
assert_eq!(
url::percent_decode(str!(
"%E6%A4%9C%E3%83%92%E3%83%A0%E8%A7%A3%E5%A1%97%E3%82%83%E3%83%83%20%3D%20%E3%82%B5"
)),
"検ヒム解塗ゃッ = サ"
);
}
// `%20` and `%23` inside a file: URL decode to a space and a hash sign.
#[test]
fn decode_file_url() {
assert_eq!(
url::percent_decode(str!("file:///tmp/space%20here/test%231.html")),
"file:///tmp/space here/test#1.html"
);
}
// A literal "+" must pass through unchanged, i.e. it must NOT be treated
// as the form-encoded representation of a space.
#[test]
fn plus_sign() {
assert_eq!(
url::percent_decode(str!(
"fonts.somewhere.com/css?family=Open+Sans:300,400,400italic,600,600italic"
)),
"fonts.somewhere.com/css?family=Open+Sans:300,400,400italic,600,600italic"
);
}
}

View File

@ -1,16 +0,0 @@
// ██████╗ █████╗ ███████╗███████╗██╗███╗ ██╗ ██████╗
// ██╔══██╗██╔══██╗██╔════╝██╔════╝██║████╗ ██║██╔════╝
// ██████╔╝███████║███████╗███████╗██║██╔██╗ ██║██║ ███╗
// ██╔═══╝ ██╔══██║╚════██║╚════██║██║██║╚██╗██║██║ ██║
// ██║ ██║ ██║███████║███████║██║██║ ╚████║╚██████╔╝
// ╚═╝ ╚═╝ ╚═╝╚══════╝╚══════╝╚═╝╚═╝ ╚═══╝ ╚═════╝
#[cfg(test)]
mod passing {
use crate::url;
// Apostrophes must be percent-encoded as %27 (they are unsafe in URLs
// embedded inside HTML attributes).
#[test]
fn apostrophe() {
assert_eq!(url::percent_encode(str!("'")), "%27");
}
}

View File

@ -1,8 +1,12 @@
use base64;
use url::{form_urlencoded, Url};
use percent_encoding::percent_decode_str;
use url::Url;
use crate::utils::{detect_media_type, parse_content_type};
pub const EMPTY_IMAGE_DATA_URL: &'static str = "data:image/png;base64,\
iVBORw0KGgoAAAANSUhEUgAAAA0AAAANCAQAAADY4iz3AAAAEUlEQVR42mNkwAkYR6UolgIACvgADsuK6xYAAAAASUVORK5CYII=";
pub fn clean_url(url: Url) -> Url {
let mut url = url.clone();
@ -26,7 +30,7 @@ pub fn create_data_url(media_type: &str, charset: &str, data: &[u8], final_asset
if !charset.trim().is_empty() && !charset.trim().eq_ignore_ascii_case("US-ASCII") {
format!(";charset={}", charset.trim())
} else {
str!()
"".to_string()
};
data_url.set_path(format!("{}{};base64,{}", media_type, c, base64::encode(data)).as_str());
@ -57,7 +61,7 @@ pub fn parse_data_url(url: &Url) -> (String, String, Vec<u8>) {
let (media_type, charset, is_base64) = parse_content_type(&content_type);
// Parse raw data into vector of bytes
let text: String = percent_decode(data);
let text: String = percent_decode_str(&data).decode_utf8_lossy().to_string();
let blob: Vec<u8> = if is_base64 {
base64::decode(&text).unwrap_or(vec![])
} else {
@ -67,29 +71,6 @@ pub fn parse_data_url(url: &Url) -> (String, String, Vec<u8>) {
(media_type, charset, blob)
}
pub fn percent_decode(input: String) -> String {
let input: String = input.replace("+", "%2B");
form_urlencoded::parse(input.as_bytes())
.map(|(key, val)| {
[
key.to_string(),
if val.to_string().len() == 0 {
str!()
} else {
str!('=')
},
val.to_string(),
]
.concat()
})
.collect()
}
pub fn percent_encode(input: String) -> String {
    // Percent-encode the raw bytes and stitch the emitted chunks together.
    let mut encoded = String::new();
    for piece in form_urlencoded::byte_serialize(input.as_bytes()) {
        encoded.push_str(piece);
    }
    encoded
}
pub fn resolve_url(from: &Url, to: &str) -> Url {
match Url::parse(&to) {
Ok(parsed_url) => parsed_url,

View File

@ -92,6 +92,62 @@ pub fn detect_media_type_by_file_name(filename: &str) -> String {
mime.to_string()
}
/// Reports whether `domain` falls within `domain_to_match_against`.
///
/// Matching is done label-by-label from the right (TLD first), ASCII
/// case-insensitively, ignoring a single trailing dot on either side:
/// - `""` matches nothing; a lone `"."` matches everything.
/// - `"example.com"` matches exactly `example.com` (no subdomains).
/// - `".example.com"` (leading dot) matches `example.com` and any of its
///   subdomains — the leading dot produces an empty first label, which
///   acts as a wildcard for the remaining labels.
pub fn domain_is_within_domain(domain: &str, domain_to_match_against: &str) -> bool {
    if domain_to_match_against.is_empty() {
        return false;
    }

    if domain_to_match_against == "." {
        return true;
    }

    // Split into labels, rightmost first, dropping one trailing dot
    // (a fully-qualified "example.com." equals "example.com").
    let domain_partials: Vec<&str> = domain.trim_end_matches('.').rsplit('.').collect();
    let domain_to_match_against_partials: Vec<&str> = domain_to_match_against
        .trim_end_matches('.')
        .rsplit('.')
        .collect();

    // A leading dot means "this domain and all of its subdomains".
    let domain_to_match_against_starts_with_a_dot = domain_to_match_against.starts_with('.');

    let label_count = std::cmp::max(
        domain_partials.len(),
        domain_to_match_against_partials.len(),
    );

    for i in 0..label_count {
        // The domain has more labels than the pattern and the pattern has no
        // leading dot: subdomains are not allowed, so this is not a match.
        if !domain_to_match_against_starts_with_a_dot
            && i >= domain_to_match_against_partials.len()
        {
            return false;
        }

        // Out-of-range labels compare as empty strings.
        let domain_partial: &str = domain_partials.get(i).copied().unwrap_or("");
        let domain_to_match_against_partial: &str =
            domain_to_match_against_partials.get(i).copied().unwrap_or("");

        // An empty pattern label (produced by a leading dot) is a wildcard;
        // any non-empty label must match case-insensitively.
        if !domain_to_match_against_partial.is_empty()
            && !domain_to_match_against_partial.eq_ignore_ascii_case(domain_partial)
        {
            return false;
        }
    }

    true
}
pub fn indent(level: u32) -> String {
let mut result: String = String::new();
let mut l: u32 = level;
@ -110,8 +166,8 @@ pub fn is_plaintext_media_type(media_type: &str) -> bool {
}
pub fn parse_content_type(content_type: &str) -> (String, String, bool) {
let mut media_type: String = str!("text/plain");
let mut charset: String = str!("US-ASCII");
let mut media_type: String = "text/plain".to_string();
let mut charset: String = "US-ASCII".to_string();
let mut is_base64: bool = false;
// Parse meta data
@ -120,7 +176,7 @@ pub fn parse_content_type(content_type: &str) -> (String, String, bool) {
for item in &content_type_items {
if i == 0 {
if item.trim().len() > 0 {
media_type = str!(item.trim());
media_type = item.trim().to_string();
}
} else {
if item.trim().eq_ignore_ascii_case("base64") {
@ -148,7 +204,7 @@ pub fn retrieve_asset(
let (media_type, charset, data) = parse_data_url(url);
Ok((data, url.clone(), media_type, charset))
} else if url.scheme() == "file" {
// Check if parent_url is also file:/// (if not, then we don't embed the asset)
// Check if parent_url is also a file: URL (if not, then we don't embed the asset)
if parent_url.scheme() != "file" {
if !options.silent {
eprintln!(
@ -199,7 +255,7 @@ pub fn retrieve_asset(
file_blob.clone(),
url.clone(),
detect_media_type(&file_blob, url),
str!(),
"".to_string(),
))
}
} else {
@ -232,14 +288,25 @@ pub fn retrieve_asset(
Ok((
cache.get(&cache_key).unwrap().to_vec(),
url.clone(),
str!(),
str!(),
"".to_string(),
"".to_string(),
))
} else {
if let Some(domains) = &options.domains {
let domain_matches = domains
.iter()
.any(|d| domain_is_within_domain(url.host_str().unwrap(), &d.trim()));
if (options.exclude_domains && domain_matches)
|| (!options.exclude_domains && !domain_matches)
{
return Err(client.get("").send().unwrap_err());
}
}
// URL not in cache, we retrieve the file
match client.get(url.as_str()).send() {
Ok(mut response) => {
if !options.ignore_errors && response.status() != 200 {
Ok(response) => {
if !options.ignore_errors && response.status() != reqwest::StatusCode::OK {
if !options.silent {
eprintln!(
"{}{}{} ({}){}",
@ -258,19 +325,17 @@ pub fn retrieve_asset(
return Err(client.get("").send().unwrap_err());
}
let response_url: Url = response.url().clone();
if !options.silent {
if url.as_str() == response.url().as_str() {
if url.as_str() == response_url.as_str() {
eprintln!("{}{}", indent(depth).as_str(), &url);
} else {
eprintln!("{}{} -> {}", indent(depth).as_str(), &url, &response.url());
eprintln!("{}{} -> {}", indent(depth).as_str(), &url, &response_url);
}
}
let new_cache_key: String = clean_url(response.url().clone()).to_string();
// Convert response into a byte array
let mut data: Vec<u8> = vec![];
response.copy_to(&mut data).unwrap();
let new_cache_key: String = clean_url(response_url.clone()).to_string();
// Attempt to obtain media type and charset by reading Content-Type header
let content_type: &str = response
@ -281,11 +346,34 @@ pub fn retrieve_asset(
let (media_type, charset, _is_base64) = parse_content_type(&content_type);
// Convert response into a byte array
let mut data: Vec<u8> = vec![];
match response.bytes() {
Ok(b) => {
data = b.to_vec();
}
Err(error) => {
if !options.silent {
eprintln!(
"{}{}{}{}",
indent(depth).as_str(),
if options.no_color { "" } else { ANSI_COLOR_RED },
error,
if options.no_color {
""
} else {
ANSI_COLOR_RESET
},
);
}
}
}
// Add retrieved resource to cache
cache.insert(new_cache_key, data.clone());
// Return
Ok((data, response.url().clone(), media_type, charset))
Ok((data, response_url, media_type, charset))
}
Err(error) => {
if !options.silent {

View File

Before

Width:  |  Height:  |  Size: 296 B

After

Width:  |  Height:  |  Size: 296 B

View File

Before

Width:  |  Height:  |  Size: 296 B

After

Width:  |  Height:  |  Size: 296 B

View File

@ -77,8 +77,8 @@ mod passing {
#[test]
fn css_import_string() {
let mut cmd = Command::cargo_bin(env!("CARGO_PKG_NAME")).unwrap();
let path_html: &Path = Path::new("src/tests/data/css/index.html");
let path_css: &Path = Path::new("src/tests/data/css/style.css");
let path_html: &Path = Path::new("tests/_data_/css/index.html");
let path_css: &Path = Path::new("tests/_data_/css/style.css");
assert!(path_html.is_file());
assert!(path_css.is_file());

View File

@ -11,6 +11,8 @@ mod passing {
use std::env;
use std::process::Command;
use monolith::url::EMPTY_IMAGE_DATA_URL;
#[test]
fn isolate_data_url() {
let mut cmd = Command::cargo_bin(env!("CARGO_PKG_NAME")).unwrap();
@ -28,7 +30,7 @@ mod passing {
assert_eq!(
String::from_utf8_lossy(&out.stdout),
"<html><head>\
<meta http-equiv=\"Content-Security-Policy\" content=\"default-src 'unsafe-inline' data:;\"></meta>\
<meta http-equiv=\"Content-Security-Policy\" content=\"default-src 'unsafe-eval' 'unsafe-inline' data:;\"></meta>\
</head><body>Hello, World!</body></html>\n"
);
@ -139,7 +141,7 @@ mod passing {
Hi\
</body>\
</html>\n",
empty_image = empty_image!()
empty_image = EMPTY_IMAGE_DATA_URL,
)
);

View File

@ -14,15 +14,20 @@ mod passing {
use std::process::Command;
use url::Url;
use monolith::url::EMPTY_IMAGE_DATA_URL;
#[test]
fn local_file_target_input_relative_target_path() {
let mut cmd = Command::cargo_bin(env!("CARGO_PKG_NAME")).unwrap();
let cwd_normalized: String =
str!(env::current_dir().unwrap().to_str().unwrap()).replace("\\", "/");
let cwd_normalized: String = env::current_dir()
.unwrap()
.to_str()
.unwrap()
.replace("\\", "/");
let out = cmd
.arg("-M")
.arg(format!(
"src{s}tests{s}data{s}basic{s}local-file.html",
"tests{s}_data_{s}basic{s}local-file.html",
s = MAIN_SEPARATOR
))
.output()
@ -34,11 +39,11 @@ mod passing {
String::from_utf8_lossy(&out.stderr),
format!(
"\
{file}{cwd}/src/tests/data/basic/local-file.html\n \
{file}{cwd}/src/tests/data/basic/local-style.css\n \
{file}{cwd}/src/tests/data/basic/local-style-does-not-exist.css (not found)\n \
{file}{cwd}/src/tests/data/basic/monolith.png (not found)\n \
{file}{cwd}/src/tests/data/basic/local-script.js\n\
{file}{cwd}/tests/_data_/basic/local-file.html\n \
{file}{cwd}/tests/_data_/basic/local-style.css\n \
{file}{cwd}/tests/_data_/basic/local-style-does-not-exist.css (not found)\n \
{file}{cwd}/tests/_data_/basic/monolith.png (not found)\n \
{file}{cwd}/tests/_data_/basic/local-script.js\n\
",
file = file_url_protocol,
cwd = cwd_normalized
@ -69,7 +74,7 @@ mod passing {
#[test]
fn local_file_target_input_absolute_target_path() {
let mut cmd = Command::cargo_bin(env!("CARGO_PKG_NAME")).unwrap();
let path_html: &Path = Path::new("src/tests/data/basic/local-file.html");
let path_html: &Path = Path::new("tests/_data_/basic/local-file.html");
let out = cmd
.arg("-M")
@ -93,7 +98,7 @@ mod passing {
format!(
"\
<!DOCTYPE html><html lang=\"en\"><head>\
<meta http-equiv=\"Content-Security-Policy\" content=\"default-src 'unsafe-inline' data:; style-src 'none'; script-src 'none'; img-src data:;\"></meta>\n \
<meta http-equiv=\"Content-Security-Policy\" content=\"default-src 'unsafe-eval' 'unsafe-inline' data:; style-src 'none'; script-src 'none'; img-src data:;\"></meta>\n \
<meta http-equiv=\"Content-Type\" content=\"text/html; charset=utf-8\">\n \
<title>Local HTML file</title>\n \
<link rel=\"stylesheet\" type=\"text/css\">\n \
@ -104,7 +109,7 @@ mod passing {
<script></script>\n\n\n\n\
</body></html>\n\
",
empty_image = empty_image!()
empty_image = EMPTY_IMAGE_DATA_URL
)
);
@ -115,14 +120,17 @@ mod passing {
#[test]
fn local_file_url_target_input() {
let mut cmd = Command::cargo_bin(env!("CARGO_PKG_NAME")).unwrap();
let cwd_normalized: String =
str!(env::current_dir().unwrap().to_str().unwrap()).replace("\\", "/");
let cwd_normalized: String = env::current_dir()
.unwrap()
.to_str()
.unwrap()
.replace("\\", "/");
let file_url_protocol: &str = if cfg!(windows) { "file:///" } else { "file://" };
let out = cmd
.arg("-M")
.arg("-cji")
.arg(format!(
"{file}{cwd}/src/tests/data/basic/local-file.html",
"{file}{cwd}/tests/_data_/basic/local-file.html",
file = file_url_protocol,
cwd = cwd_normalized,
))
@ -133,7 +141,7 @@ mod passing {
assert_eq!(
String::from_utf8_lossy(&out.stderr),
format!(
"{file}{cwd}/src/tests/data/basic/local-file.html\n",
"{file}{cwd}/tests/_data_/basic/local-file.html\n",
file = file_url_protocol,
cwd = cwd_normalized,
)
@ -156,7 +164,7 @@ mod passing {
<script></script>\n\n\n\n\
</body></html>\n\
",
empty_image = empty_image!()
empty_image = EMPTY_IMAGE_DATA_URL
)
);
@ -167,8 +175,8 @@ mod passing {
#[test]
fn embed_file_url_local_asset_within_style_attribute() {
let mut cmd = Command::cargo_bin(env!("CARGO_PKG_NAME")).unwrap();
let path_html: &Path = Path::new("src/tests/data/svg/index.html");
let path_svg: &Path = Path::new("src/tests/data/svg/image.svg");
let path_html: &Path = Path::new("tests/_data_/svg/index.html");
let path_svg: &Path = Path::new("tests/_data_/svg/image.svg");
let out = cmd.arg("-M").arg(path_html.as_os_str()).output().unwrap();
@ -198,21 +206,24 @@ mod passing {
#[test]
fn discard_integrity_for_local_files() {
let mut cmd = Command::cargo_bin(env!("CARGO_PKG_NAME")).unwrap();
let cwd_normalized: String =
str!(env::current_dir().unwrap().to_str().unwrap()).replace("\\", "/");
let cwd_normalized: String = env::current_dir()
.unwrap()
.to_str()
.unwrap()
.replace("\\", "/");
let file_url_protocol: &str = if cfg!(windows) { "file:///" } else { "file://" };
let out = cmd
.arg("-M")
.arg("-i")
.arg(if cfg!(windows) {
format!(
"{file}{cwd}/src/tests/data/integrity/index.html",
"{file}{cwd}/tests/_data_/integrity/index.html",
file = file_url_protocol,
cwd = cwd_normalized,
)
} else {
format!(
"{file}{cwd}/src/tests/data/integrity/index.html",
"{file}{cwd}/tests/_data_/integrity/index.html",
file = file_url_protocol,
cwd = cwd_normalized,
)
@ -225,11 +236,11 @@ mod passing {
String::from_utf8_lossy(&out.stderr),
format!(
"\
{file}{cwd}/src/tests/data/integrity/index.html\n \
{file}{cwd}/src/tests/data/integrity/style.css\n \
{file}{cwd}/src/tests/data/integrity/style.css\n \
{file}{cwd}/src/tests/data/integrity/script.js\n \
{file}{cwd}/src/tests/data/integrity/script.js\n\
{file}{cwd}/tests/_data_/integrity/index.html\n \
{file}{cwd}/tests/_data_/integrity/style.css\n \
{file}{cwd}/tests/_data_/integrity/style.css\n \
{file}{cwd}/tests/_data_/integrity/script.js\n \
{file}{cwd}/tests/_data_/integrity/script.js\n\
",
file = file_url_protocol,
cwd = cwd_normalized,

View File

@ -17,8 +17,8 @@ mod passing {
#[test]
fn parse_noscript_contents() {
let mut cmd = Command::cargo_bin(env!("CARGO_PKG_NAME")).unwrap();
let path_html: &Path = Path::new("src/tests/data/noscript/index.html");
let path_svg: &Path = Path::new("src/tests/data/noscript/image.svg");
let path_html: &Path = Path::new("tests/_data_/noscript/index.html");
let path_svg: &Path = Path::new("tests/_data_/noscript/image.svg");
let out = cmd.arg("-M").arg(path_html.as_os_str()).output().unwrap();
@ -48,8 +48,8 @@ mod passing {
#[test]
fn unwrap_noscript_contents() {
let mut cmd = Command::cargo_bin(env!("CARGO_PKG_NAME")).unwrap();
let path_html: &Path = Path::new("src/tests/data/noscript/index.html");
let path_svg: &Path = Path::new("src/tests/data/noscript/image.svg");
let path_html: &Path = Path::new("tests/_data_/noscript/index.html");
let path_svg: &Path = Path::new("tests/_data_/noscript/image.svg");
let out = cmd.arg("-Mn").arg(path_html.as_os_str()).output().unwrap();
@ -79,8 +79,8 @@ mod passing {
#[test]
fn unwrap_noscript_contents_nested() {
let mut cmd = Command::cargo_bin(env!("CARGO_PKG_NAME")).unwrap();
let path_html: &Path = Path::new("src/tests/data/noscript/nested.html");
let path_svg: &Path = Path::new("src/tests/data/noscript/image.svg");
let path_html: &Path = Path::new("tests/_data_/noscript/nested.html");
let path_svg: &Path = Path::new("tests/_data_/noscript/image.svg");
let out = cmd.arg("-Mn").arg(path_html.as_os_str()).output().unwrap();
@ -110,8 +110,8 @@ mod passing {
#[test]
fn unwrap_noscript_contents_with_script() {
let mut cmd = Command::cargo_bin(env!("CARGO_PKG_NAME")).unwrap();
let path_html: &Path = Path::new("src/tests/data/noscript/script.html");
let path_svg: &Path = Path::new("src/tests/data/noscript/image.svg");
let path_html: &Path = Path::new("tests/_data_/noscript/script.html");
let path_svg: &Path = Path::new("tests/_data_/noscript/image.svg");
let out = cmd.arg("-Mn").arg(path_html.as_os_str()).output().unwrap();

View File

@ -16,12 +16,12 @@ mod passing {
#[test]
fn properly_save_document_with_gb2312() {
let cwd = env::current_dir().unwrap();
let cwd_normalized: String = str!(cwd.to_str().unwrap()).replace("\\", "/");
let cwd_normalized: String = cwd.to_str().unwrap().replace("\\", "/");
let mut cmd = Command::cargo_bin(env!("CARGO_PKG_NAME")).unwrap();
let out = cmd
.arg("-M")
.arg(format!(
"src{s}tests{s}data{s}unusual_encodings{s}gb2312.html",
"tests{s}_data_{s}unusual_encodings{s}gb2312.html",
s = MAIN_SEPARATOR
))
.output()
@ -32,7 +32,7 @@ mod passing {
assert_eq!(
String::from_utf8_lossy(&out.stderr),
format!(
"{file}{cwd}/src/tests/data/unusual_encodings/gb2312.html\n",
"{file}{cwd}/tests/_data_/unusual_encodings/gb2312.html\n",
file = file_url_protocol,
cwd = cwd_normalized,
)
@ -67,7 +67,7 @@ mod passing {
fn properly_save_document_with_gb2312_from_stdin() {
let mut echo = Command::new("cat")
.arg(format!(
"src{s}tests{s}data{s}unusual_encodings{s}gb2312.html",
"tests{s}_data_{s}unusual_encodings{s}gb2312.html",
s = MAIN_SEPARATOR
))
.stdout(Stdio::piped())
@ -111,14 +111,14 @@ mod passing {
#[test]
fn properly_save_document_with_gb2312_custom_charset() {
let cwd = env::current_dir().unwrap();
let cwd_normalized: String = str!(cwd.to_str().unwrap()).replace("\\", "/");
let cwd_normalized: String = cwd.to_str().unwrap().replace("\\", "/");
let mut cmd = Command::cargo_bin(env!("CARGO_PKG_NAME")).unwrap();
let out = cmd
.arg("-M")
.arg("-C")
.arg("utf8")
.arg(format!(
"src{s}tests{s}data{s}unusual_encodings{s}gb2312.html",
"tests{s}_data_{s}unusual_encodings{s}gb2312.html",
s = MAIN_SEPARATOR
))
.output()
@ -129,7 +129,7 @@ mod passing {
assert_eq!(
String::from_utf8_lossy(&out.stderr),
format!(
"{file}{cwd}/src/tests/data/unusual_encodings/gb2312.html\n",
"{file}{cwd}/tests/_data_/unusual_encodings/gb2312.html\n",
file = file_url_protocol,
cwd = cwd_normalized,
)
@ -161,7 +161,7 @@ mod passing {
.arg("-C")
.arg("utf0")
.arg(format!(
"src{s}tests{s}data{s}unusual_encodings{s}gb2312.html",
"tests{s}_data_{s}unusual_encodings{s}gb2312.html",
s = MAIN_SEPARATOR
))
.output()
@ -198,12 +198,12 @@ mod failing {
#[test]
fn change_iso88591_to_utf8_to_properly_display_html_entities() {
let cwd = env::current_dir().unwrap();
let cwd_normalized: String = str!(cwd.to_str().unwrap()).replace("\\", "/");
let cwd_normalized: String = cwd.to_str().unwrap().replace("\\", "/");
let mut cmd = Command::cargo_bin(env!("CARGO_PKG_NAME")).unwrap();
let out = cmd
.arg("-M")
.arg(format!(
"src{s}tests{s}data{s}unusual_encodings{s}iso-8859-1.html",
"tests{s}_data_{s}unusual_encodings{s}iso-8859-1.html",
s = MAIN_SEPARATOR
))
.output()
@ -214,7 +214,7 @@ mod failing {
assert_eq!(
String::from_utf8_lossy(&out.stderr),
format!(
"{file}{cwd}/src/tests/data/unusual_encodings/iso-8859-1.html\n",
"{file}{cwd}/tests/_data_/unusual_encodings/iso-8859-1.html\n",
file = file_url_protocol,
cwd = cwd_normalized,
)

View File

@ -11,8 +11,9 @@ mod passing {
use reqwest::Url;
use std::collections::HashMap;
use crate::css;
use crate::opts::Options;
use monolith::css;
use monolith::opts::Options;
use monolith::url::EMPTY_IMAGE_DATA_URL;
#[test]
fn empty_input() {
@ -67,7 +68,7 @@ mod passing {
margin-top: -20px; \
line-height: -1; \
height: calc(100vh - 10pt)",
empty_image = empty_image!()
empty_image = EMPTY_IMAGE_DATA_URL
)
);
}
@ -99,7 +100,7 @@ mod passing {
margin-top: -20px; \
line-height: -1; \
height: calc(100vh - 10pt)",
empty_image = empty_image!()
empty_image = EMPTY_IMAGE_DATA_URL
)
);
}

View File

@ -7,7 +7,7 @@
#[cfg(test)]
mod passing {
use crate::css;
use monolith::css;
#[test]
fn backrgound() {
@ -64,7 +64,7 @@ mod passing {
#[cfg(test)]
mod failing {
use crate::css;
use monolith::css;
#[test]
fn empty() {

View File

@ -9,12 +9,12 @@
mod passing {
use html5ever::serialize::{serialize, SerializeOpts};
use crate::html;
use monolith::html;
#[test]
fn basic() {
let html = "<div>text</div>";
let mut dom = html::html_to_dom(&html.as_bytes().to_vec(), str!());
let mut dom = html::html_to_dom(&html.as_bytes().to_vec(), "".to_string());
dom = html::add_favicon(&dom.document, "I_AM_A_FAVICON_DATA_URL".to_string());

View File

@ -7,7 +7,7 @@
#[cfg(test)]
mod passing {
use crate::html;
use monolith::html;
#[test]
fn empty_input_sha256() {
@ -51,7 +51,7 @@ mod passing {
#[cfg(test)]
mod failing {
use crate::html;
use monolith::html;
#[test]
fn empty_hash() {

View File

@ -7,8 +7,8 @@
#[cfg(test)]
mod passing {
use crate::html;
use crate::opts::Options;
use monolith::html;
use monolith::opts::Options;
#[test]
fn isolated() {
@ -16,7 +16,10 @@ mod passing {
options.isolate = true;
let csp_content = html::compose_csp(&options);
assert_eq!(csp_content, "default-src 'unsafe-inline' data:;");
assert_eq!(
csp_content,
"default-src 'unsafe-eval' 'unsafe-inline' data:;"
);
}
#[test]
@ -75,6 +78,6 @@ mod passing {
options.no_images = true;
let csp_content = html::compose_csp(&options);
assert_eq!(csp_content, "default-src 'unsafe-inline' data:; style-src 'none'; font-src 'none'; frame-src 'none'; child-src 'none'; script-src 'none'; img-src data:;");
assert_eq!(csp_content, "default-src 'unsafe-eval' 'unsafe-inline' data:; style-src 'none'; font-src 'none'; frame-src 'none'; child-src 'none'; script-src 'none'; img-src data:;");
}
}

View File

@ -10,7 +10,7 @@ mod passing {
use chrono::prelude::*;
use reqwest::Url;
use crate::html;
use monolith::html;
#[test]
fn http_url() {

View File

@ -11,8 +11,9 @@ mod passing {
use reqwest::Url;
use std::collections::HashMap;
use crate::html;
use crate::opts::Options;
use monolith::html;
use monolith::opts::Options;
use monolith::url::EMPTY_IMAGE_DATA_URL;
#[test]
fn small_medium_large() {
@ -35,9 +36,7 @@ mod passing {
embedded_css,
format!(
"{} 1x, {} 1.5x, {} 2x",
empty_image!(),
empty_image!(),
empty_image!(),
EMPTY_IMAGE_DATA_URL, EMPTY_IMAGE_DATA_URL, EMPTY_IMAGE_DATA_URL,
),
);
}
@ -61,7 +60,7 @@ mod passing {
assert_eq!(
embedded_css,
format!("{}, {} 1.5x", empty_image!(), empty_image!()),
format!("{}, {} 1.5x", EMPTY_IMAGE_DATA_URL, EMPTY_IMAGE_DATA_URL),
);
}
@ -84,7 +83,7 @@ mod passing {
assert_eq!(
embedded_css,
format!("{} 1x, {} 2x", empty_image!(), empty_image!()),
format!("{} 1x, {} 2x", EMPTY_IMAGE_DATA_URL, EMPTY_IMAGE_DATA_URL),
);
}
@ -109,9 +108,7 @@ mod passing {
embedded_css,
format!(
"{} 1x, {} 2x, {} 3x",
empty_image!(),
empty_image!(),
empty_image!()
EMPTY_IMAGE_DATA_URL, EMPTY_IMAGE_DATA_URL, EMPTY_IMAGE_DATA_URL
),
);
}
@ -130,8 +127,9 @@ mod failing {
use reqwest::Url;
use std::collections::HashMap;
use crate::html;
use crate::opts::Options;
use monolith::html;
use monolith::opts::Options;
use monolith::url::EMPTY_IMAGE_DATA_URL;
#[test]
fn trailing_comma() {
@ -152,7 +150,7 @@ mod failing {
assert_eq!(
embedded_css,
format!("{} 1x, {} 2x,", empty_image!(), empty_image!()),
format!("{} 1x, {} 2x,", EMPTY_IMAGE_DATA_URL, EMPTY_IMAGE_DATA_URL),
);
}
}

View File

@ -7,7 +7,7 @@
#[cfg(test)]
mod passing {
use crate::html;
use monolith::html;
#[test]
fn present() {
@ -19,11 +19,11 @@ mod passing {
<body>
</body>
</html>";
let dom = html::html_to_dom(&html.as_bytes().to_vec(), str!());
let dom = html::html_to_dom(&html.as_bytes().to_vec(), "".to_string());
assert_eq!(
html::get_base_url(&dom.document),
Some(str!("https://musicbrainz.org"))
Some("https://musicbrainz.org".to_string())
);
}
@ -38,11 +38,11 @@ mod passing {
<body>
</body>
</html>";
let dom = html::html_to_dom(&html.as_bytes().to_vec(), str!());
let dom = html::html_to_dom(&html.as_bytes().to_vec(), "".to_string());
assert_eq!(
html::get_base_url(&dom.document),
Some(str!("https://www.discogs.com/"))
Some("https://www.discogs.com/".to_string())
);
}
}
@ -56,7 +56,7 @@ mod passing {
#[cfg(test)]
mod failing {
use crate::html;
use monolith::html;
#[test]
fn absent() {
@ -67,7 +67,7 @@ mod failing {
<body>
</body>
</html>";
let dom = html::html_to_dom(&html.as_bytes().to_vec(), str!());
let dom = html::html_to_dom(&html.as_bytes().to_vec(), "".to_string());
assert_eq!(html::get_base_url(&dom.document), None);
}
@ -82,7 +82,7 @@ mod failing {
<body>
</body>
</html>";
let dom = html::html_to_dom(&html.as_bytes().to_vec(), str!());
let dom = html::html_to_dom(&html.as_bytes().to_vec(), "".to_string());
assert_eq!(html::get_base_url(&dom.document), None);
}
@ -97,8 +97,8 @@ mod failing {
<body>
</body>
</html>";
let dom = html::html_to_dom(&html.as_bytes().to_vec(), str!());
let dom = html::html_to_dom(&html.as_bytes().to_vec(), "".to_string());
assert_eq!(html::get_base_url(&dom.document), Some(str!()));
assert_eq!(html::get_base_url(&dom.document), Some("".to_string()));
}
}

View File

@ -7,7 +7,7 @@
#[cfg(test)]
mod passing {
use crate::html;
use monolith::html;
#[test]
fn meta_content_type() {
@ -19,9 +19,9 @@ mod passing {
<body>
</body>
</html>";
let dom = html::html_to_dom(&html.as_bytes().to_vec(), str!());
let dom = html::html_to_dom(&html.as_bytes().to_vec(), "".to_string());
assert_eq!(html::get_charset(&dom.document), Some(str!("GB2312")));
assert_eq!(html::get_charset(&dom.document), Some("GB2312".to_string()));
}
#[test]
@ -34,9 +34,9 @@ mod passing {
<body>
</body>
</html>";
let dom = html::html_to_dom(&html.as_bytes().to_vec(), str!());
let dom = html::html_to_dom(&html.as_bytes().to_vec(), "".to_string());
assert_eq!(html::get_charset(&dom.document), Some(str!("GB2312")));
assert_eq!(html::get_charset(&dom.document), Some("GB2312".to_string()));
}
#[test]
@ -50,9 +50,9 @@ mod passing {
<body>
</body>
</html>";
let dom = html::html_to_dom(&html.as_bytes().to_vec(), str!());
let dom = html::html_to_dom(&html.as_bytes().to_vec(), "".to_string());
assert_eq!(html::get_charset(&dom.document), Some(str!("utf-8")));
assert_eq!(html::get_charset(&dom.document), Some("utf-8".to_string()));
}
#[test]
fn multiple_conflicting_meta_content_type_first() {
@ -65,8 +65,8 @@ mod passing {
<body>
</body>
</html>";
let dom = html::html_to_dom(&html.as_bytes().to_vec(), str!());
let dom = html::html_to_dom(&html.as_bytes().to_vec(), "".to_string());
assert_eq!(html::get_charset(&dom.document), Some(str!("GB2312")));
assert_eq!(html::get_charset(&dom.document), Some("GB2312".to_string()));
}
}

View File

@ -9,12 +9,12 @@
mod passing {
use html5ever::rcdom::{Handle, NodeData};
use crate::html;
use monolith::html;
#[test]
fn div_two_style_attributes() {
let html = "<!doctype html><html><head></head><body><DIV STYLE=\"color: blue;\" style=\"display: none;\"></div></body></html>";
let dom = html::html_to_dom(&html.as_bytes().to_vec(), str!());
let dom = html::html_to_dom(&html.as_bytes().to_vec(), "".to_string());
let mut count = 0;
fn test_walk(node: &Handle, i: &mut i8) {
@ -35,7 +35,7 @@ mod passing {
} else if node_name == "div" {
assert_eq!(
html::get_node_attr(node, "style"),
Some(str!("color: blue;"))
Some("color: blue;".to_string())
);
}

View File

@ -9,12 +9,12 @@
mod passing {
use html5ever::rcdom::{Handle, NodeData};
use crate::html;
use monolith::html;
#[test]
fn parent_node_names() {
let html = "<!doctype html><html><HEAD></HEAD><body><div><P></P></div></body></html>";
let dom = html::html_to_dom(&html.as_bytes().to_vec(), str!());
let dom = html::html_to_dom(&html.as_bytes().to_vec(), "".to_string());
let mut count = 0;
fn test_walk(node: &Handle, i: &mut i8) {

View File

@ -7,12 +7,12 @@
#[cfg(test)]
mod passing {
use crate::html;
use monolith::html;
#[test]
fn icon() {
let html = "<link rel=\"icon\" href=\"\" /><div>text</div>";
let dom = html::html_to_dom(&html.as_bytes().to_vec(), str!());
let dom = html::html_to_dom(&html.as_bytes().to_vec(), "".to_string());
let res: bool = html::has_favicon(&dom.document);
assert!(res);
@ -21,7 +21,7 @@ mod passing {
#[test]
fn shortcut_icon() {
let html = "<link rel=\"shortcut icon\" href=\"\" /><div>text</div>";
let dom = html::html_to_dom(&html.as_bytes().to_vec(), str!());
let dom = html::html_to_dom(&html.as_bytes().to_vec(), "".to_string());
let res: bool = html::has_favicon(&dom.document);
assert!(res);
@ -37,12 +37,12 @@ mod passing {
#[cfg(test)]
mod failing {
use crate::html;
use monolith::html;
#[test]
fn absent() {
let html = "<div>text</div>";
let dom = html::html_to_dom(&html.as_bytes().to_vec(), str!());
let dom = html::html_to_dom(&html.as_bytes().to_vec(), "".to_string());
let res: bool = html::has_favicon(&dom.document);
assert!(!res);

View File

@ -7,7 +7,7 @@
#[cfg(test)]
mod passing {
use crate::html;
use monolith::html;
#[test]
fn icon() {
@ -34,7 +34,7 @@ mod passing {
#[cfg(test)]
mod failing {
use crate::html;
use monolith::html;
#[test]
fn mask_icon() {

View File

@ -7,17 +7,17 @@
#[cfg(test)]
mod passing {
use crate::html;
use crate::opts::Options;
use monolith::html;
use monolith::opts::Options;
#[test]
fn div_as_root_element() {
let html = "<div><script src=\"some.js\"></script></div>";
let dom = html::html_to_dom(&html.as_bytes().to_vec(), str!());
let dom = html::html_to_dom(&html.as_bytes().to_vec(), "".to_string());
let options = Options::default();
assert_eq!(
String::from_utf8_lossy(&html::serialize_document(dom, str!(), &options)),
String::from_utf8_lossy(&html::serialize_document(dom, "".to_string(), &options)),
"<html><head></head><body><div><script src=\"some.js\"></script></div></body></html>"
);
}
@ -28,19 +28,19 @@ mod passing {
<link rel=\"something\" href=\"some.css\" />\
<meta http-equiv=\"Content-Security-Policy\" content=\"default-src https:\">\
<div><script src=\"some.js\"></script></div>";
let dom = html::html_to_dom(&html.as_bytes().to_vec(), str!());
let dom = html::html_to_dom(&html.as_bytes().to_vec(), "".to_string());
let mut options = Options::default();
options.isolate = true;
assert_eq!(
String::from_utf8_lossy(&html::serialize_document(
dom,
str!(),
"".to_string(),
&options
)),
"<html>\
<head>\
<meta http-equiv=\"Content-Security-Policy\" content=\"default-src 'unsafe-inline' data:;\"></meta>\
<meta http-equiv=\"Content-Security-Policy\" content=\"default-src 'unsafe-eval' 'unsafe-inline' data:;\"></meta>\
<title>Isolated document</title>\
<link rel=\"something\" href=\"some.css\">\
<meta http-equiv=\"Content-Security-Policy\" content=\"default-src https:\">\
@ -60,12 +60,12 @@ mod passing {
<title>Unstyled document</title>\
<link rel=\"stylesheet\" href=\"main.css\"/>\
<div style=\"display: none;\"></div>";
let dom = html::html_to_dom(&html.as_bytes().to_vec(), str!());
let dom = html::html_to_dom(&html.as_bytes().to_vec(), "".to_string());
let mut options = Options::default();
options.no_css = true;
assert_eq!(
String::from_utf8_lossy(&html::serialize_document(dom, str!(), &options)),
String::from_utf8_lossy(&html::serialize_document(dom, "".to_string(), &options)),
"<!DOCTYPE html>\
<html>\
<head>\
@ -84,14 +84,14 @@ mod passing {
<title>Frameless document</title>\
<link rel=\"something\"/>\
<div><script src=\"some.js\"></script></div>";
let dom = html::html_to_dom(&html.as_bytes().to_vec(), str!());
let dom = html::html_to_dom(&html.as_bytes().to_vec(), "".to_string());
let mut options = Options::default();
options.no_frames = true;
assert_eq!(
String::from_utf8_lossy(&html::serialize_document(
dom,
str!(),
"".to_string(),
&options
)),
"<!DOCTYPE html>\
@ -117,7 +117,7 @@ mod passing {
<img style=\"width: 100%;\" src=\"some.png\" />\
<iframe src=\"some.html\"></iframe>\
</div>";
let dom = html::html_to_dom(&html.as_bytes().to_vec(), str!());
let dom = html::html_to_dom(&html.as_bytes().to_vec(), "".to_string());
let mut options = Options::default();
options.isolate = true;
options.no_css = true;
@ -129,13 +129,13 @@ mod passing {
assert_eq!(
String::from_utf8_lossy(&html::serialize_document(
dom,
str!(),
"".to_string(),
&options
)),
"<!DOCTYPE html>\
<html>\
<head>\
<meta http-equiv=\"Content-Security-Policy\" content=\"default-src 'unsafe-inline' data:; style-src 'none'; font-src 'none'; frame-src 'none'; child-src 'none'; script-src 'none'; img-src data:;\"></meta>\
<meta http-equiv=\"Content-Security-Policy\" content=\"default-src 'unsafe-eval' 'unsafe-inline' data:; style-src 'none'; font-src 'none'; frame-src 'none'; child-src 'none'; script-src 'none'; img-src data:;\"></meta>\
<title>no-frame no-css no-js no-image isolated document</title>\
<meta http-equiv=\"Content-Security-Policy\" content=\"default-src https:\">\
<link rel=\"stylesheet\" href=\"some.css\">\

View File

@ -9,12 +9,12 @@
mod passing {
use html5ever::rcdom::{Handle, NodeData};
use crate::html;
use monolith::html;
#[test]
fn html_lang_and_body_style() {
let html = "<!doctype html><html lang=\"en\"><head></head><body></body></html>";
let dom = html::html_to_dom(&html.as_bytes().to_vec(), str!());
let dom = html::html_to_dom(&html.as_bytes().to_vec(), "".to_string());
let mut count = 0;
fn test_walk(node: &Handle, i: &mut i8) {
@ -31,23 +31,23 @@ mod passing {
let node_name = name.local.as_ref().to_string();
if node_name == "html" {
assert_eq!(html::get_node_attr(node, "lang"), Some(str!("en")));
assert_eq!(html::get_node_attr(node, "lang"), Some("en".to_string()));
html::set_node_attr(node, "lang", Some(str!("de")));
assert_eq!(html::get_node_attr(node, "lang"), Some(str!("de")));
html::set_node_attr(node, "lang", Some("de".to_string()));
assert_eq!(html::get_node_attr(node, "lang"), Some("de".to_string()));
html::set_node_attr(node, "lang", None);
assert_eq!(html::get_node_attr(node, "lang"), None);
html::set_node_attr(node, "lang", Some(str!("")));
assert_eq!(html::get_node_attr(node, "lang"), Some(str!("")));
html::set_node_attr(node, "lang", Some("".to_string()));
assert_eq!(html::get_node_attr(node, "lang"), Some("".to_string()));
} else if node_name == "body" {
assert_eq!(html::get_node_attr(node, "style"), None);
html::set_node_attr(node, "style", Some(str!("display: none;")));
html::set_node_attr(node, "style", Some("display: none;".to_string()));
assert_eq!(
html::get_node_attr(node, "style"),
Some(str!("display: none;"))
Some("display: none;".to_string())
);
}
@ -67,7 +67,7 @@ mod passing {
#[test]
fn body_background() {
let html = "<!doctype html><html lang=\"en\"><head></head><body background=\"1\" background=\"2\"></body></html>";
let dom = html::html_to_dom(&html.as_bytes().to_vec(), str!());
let dom = html::html_to_dom(&html.as_bytes().to_vec(), "".to_string());
let mut count = 0;
fn test_walk(node: &Handle, i: &mut i8) {
@ -84,7 +84,10 @@ mod passing {
let node_name = name.local.as_ref().to_string();
if node_name == "body" {
assert_eq!(html::get_node_attr(node, "background"), Some(str!("1")));
assert_eq!(
html::get_node_attr(node, "background"),
Some("1".to_string())
);
html::set_node_attr(node, "background", None);
assert_eq!(html::get_node_attr(node, "background"), None);

View File

@ -12,15 +12,16 @@ mod passing {
use std::collections::HashMap;
use url::Url;
use crate::html;
use crate::opts::Options;
use monolith::html;
use monolith::opts::Options;
use monolith::url::EMPTY_IMAGE_DATA_URL;
#[test]
fn basic() {
let cache = &mut HashMap::new();
let html: &str = "<div><P></P></div>";
let dom = html::html_to_dom(&html.as_bytes().to_vec(), str!());
let dom = html::html_to_dom(&html.as_bytes().to_vec(), "".to_string());
let url: Url = Url::parse("http://localhost").unwrap();
let mut options = Options::default();
@ -42,7 +43,7 @@ mod passing {
#[test]
fn ensure_no_recursive_iframe() {
let html = "<div><P></P><iframe src=\"\"></iframe></div>";
let dom = html::html_to_dom(&html.as_bytes().to_vec(), str!());
let dom = html::html_to_dom(&html.as_bytes().to_vec(), "".to_string());
let url: Url = Url::parse("http://localhost").unwrap();
let cache = &mut HashMap::new();
@ -65,7 +66,7 @@ mod passing {
#[test]
fn ensure_no_recursive_frame() {
let html = "<frameset><frame src=\"\"></frameset>";
let dom = html::html_to_dom(&html.as_bytes().to_vec(), str!());
let dom = html::html_to_dom(&html.as_bytes().to_vec(), "".to_string());
let url: Url = Url::parse("http://localhost").unwrap();
let cache = &mut HashMap::new();
@ -93,7 +94,7 @@ mod passing {
<style>html{background-color: #000;}</style>\
<div style=\"display: none;\"></div>\
";
let dom = html::html_to_dom(&html.as_bytes().to_vec(), str!());
let dom = html::html_to_dom(&html.as_bytes().to_vec(), "".to_string());
let url: Url = Url::parse("http://localhost").unwrap();
let cache = &mut HashMap::new();
@ -129,7 +130,7 @@ mod passing {
fn no_images() {
let html = "<link rel=\"icon\" href=\"favicon.ico\">\
<div><img src=\"http://localhost/assets/mono_lisa.png\" /></div>";
let dom = html::html_to_dom(&html.as_bytes().to_vec(), str!());
let dom = html::html_to_dom(&html.as_bytes().to_vec(), "".to_string());
let url: Url = Url::parse("http://localhost").unwrap();
let cache = &mut HashMap::new();
@ -157,7 +158,7 @@ mod passing {
</div>\
</body>\
</html>",
empty_image = empty_image!()
empty_image = EMPTY_IMAGE_DATA_URL
)
);
}
@ -166,7 +167,7 @@ mod passing {
fn no_body_background_images() {
let html =
"<body background=\"no/such/image.png\" background=\"no/such/image2.png\"></body>";
let dom = html::html_to_dom(&html.as_bytes().to_vec(), str!());
let dom = html::html_to_dom(&html.as_bytes().to_vec(), "".to_string());
let url: Url = Url::parse("http://localhost").unwrap();
let cache = &mut HashMap::new();
@ -190,7 +191,7 @@ mod passing {
#[test]
fn no_frames() {
let html = "<frameset><frame src=\"http://trackbook.com\"></frameset>";
let dom = html::html_to_dom(&html.as_bytes().to_vec(), str!());
let dom = html::html_to_dom(&html.as_bytes().to_vec(), "".to_string());
let url: Url = Url::parse("http://localhost").unwrap();
let cache = &mut HashMap::new();
@ -222,7 +223,7 @@ mod passing {
#[test]
fn no_iframes() {
let html = "<iframe src=\"http://trackbook.com\"></iframe>";
let dom = html::html_to_dom(&html.as_bytes().to_vec(), str!());
let dom = html::html_to_dom(&html.as_bytes().to_vec(), "".to_string());
let url: Url = Url::parse("http://localhost").unwrap();
let cache = &mut HashMap::new();
@ -258,7 +259,7 @@ mod passing {
<script>alert(1)</script>\
</div>\
";
let dom = html::html_to_dom(&html.as_bytes().to_vec(), str!());
let dom = html::html_to_dom(&html.as_bytes().to_vec(), "".to_string());
let url: Url = Url::parse("http://localhost").unwrap();
let cache = &mut HashMap::new();
@ -293,7 +294,7 @@ mod passing {
fn keeps_integrity_for_unfamiliar_links() {
let html = "<title>Has integrity</title>\
<link integrity=\"sha384-12345\" rel=\"something\" href=\"https://some-site.com/some-file.ext\" />";
let dom = html::html_to_dom(&html.as_bytes().to_vec(), str!());
let dom = html::html_to_dom(&html.as_bytes().to_vec(), "".to_string());
let url: Url = Url::parse("http://localhost").unwrap();
let cache = &mut HashMap::new();
@ -328,7 +329,7 @@ mod passing {
<link integrity=\"\" rel=\"stylesheet\" href=\"data:;\"/>\
<script integrity=\"\" src=\"some.js\"></script>\
";
let dom = html::html_to_dom(&html.as_bytes().to_vec(), str!());
let dom = html::html_to_dom(&html.as_bytes().to_vec(), "".to_string());
let url: Url = Url::parse("http://localhost").unwrap();
let cache = &mut HashMap::new();
@ -366,7 +367,7 @@ mod passing {
<link integrity=\"sha384-123\" rel=\"something\" href=\"data:;\"/>\
<script integrity=\"sha384-456\" src=\"some.js\"></script>\
";
let dom = html::html_to_dom(&html.as_bytes().to_vec(), str!());
let dom = html::html_to_dom(&html.as_bytes().to_vec(), "".to_string());
let url: Url = Url::parse("http://localhost").unwrap();
let cache = &mut HashMap::new();
@ -410,7 +411,7 @@ mod passing {
</body>\
</html>\
";
let dom = html::html_to_dom(&html.as_bytes().to_vec(), str!());
let dom = html::html_to_dom(&html.as_bytes().to_vec(), "".to_string());
let url: Url = Url::parse("http://localhost").unwrap();
let cache = &mut HashMap::new();
@ -452,7 +453,7 @@ mod passing {
</noscript>\
</body>\
</html>";
let dom = html::html_to_dom(&html.as_bytes().to_vec(), str!());
let dom = html::html_to_dom(&html.as_bytes().to_vec(), "".to_string());
let url: Url = Url::parse("http://localhost").unwrap();
let cache = &mut HashMap::new();
@ -480,7 +481,7 @@ mod passing {
</noscript>\
</body>\
</html>",
empty_image!(),
EMPTY_IMAGE_DATA_URL,
)
);
}
@ -488,7 +489,7 @@ mod passing {
#[test]
fn preserves_script_type_json() {
let html = "<script id=\"data\" type=\"application/json\">{\"mono\":\"lith\"}</script>";
let dom = html::html_to_dom(&html.as_bytes().to_vec(), str!());
let dom = html::html_to_dom(&html.as_bytes().to_vec(), "".to_string());
let url: Url = Url::parse("http://localhost").unwrap();
let cache = &mut HashMap::new();

View File

@ -7,7 +7,7 @@
#[cfg(test)]
mod passing {
use crate::js;
use monolith::js;
#[test]
fn onblur_camelcase() {
@ -34,7 +34,7 @@ mod passing {
#[cfg(test)]
mod failing {
use crate::js;
use monolith::js;
#[test]
fn href() {

View File

@ -2,7 +2,7 @@ mod cli;
mod css;
mod html;
mod js;
mod macros;
// mod macros;
mod opts;
mod url;
mod utils;

View File

@ -7,7 +7,7 @@
#[cfg(test)]
mod passing {
use crate::opts::Options;
use monolith::opts::Options;
#[test]
fn defaults() {
@ -24,12 +24,12 @@ mod passing {
assert_eq!(options.no_js, false);
assert_eq!(options.insecure, false);
assert_eq!(options.no_metadata, false);
assert_eq!(options.output, str!());
assert_eq!(options.output, "".to_string());
assert_eq!(options.silent, false);
assert_eq!(options.timeout, 0);
assert_eq!(options.user_agent, None);
assert_eq!(options.no_video, false);
assert_eq!(options.target, str!());
assert_eq!(options.target, "".to_string());
}
}

View File

@ -9,14 +9,15 @@
mod passing {
use reqwest::Url;
use crate::url;
use monolith::url;
#[test]
fn preserve_original() {
let u: Url = Url::parse("https://somewhere.com/font.eot#iefix").unwrap();
url::clean_url(u.clone());
let clean_u: Url = url::clean_url(u.clone());
assert_eq!(clean_u.as_str(), "https://somewhere.com/font.eot");
assert_eq!(u.as_str(), "https://somewhere.com/font.eot#iefix");
}

View File

@ -9,7 +9,7 @@
mod passing {
use reqwest::Url;
use crate::url;
use monolith::url;
#[test]
fn encode_string_with_specific_media_type() {

View File

@ -7,7 +7,7 @@
#[cfg(test)]
mod passing {
use crate::url;
use monolith::url;
#[test]
fn mailto() {
@ -80,7 +80,7 @@ mod passing {
#[cfg(test)]
mod failing {
use crate::url;
use monolith::url;
#[test]
fn url_with_no_protocol() {

View File

@ -2,6 +2,4 @@ mod clean_url;
mod create_data_url;
mod is_url_and_has_protocol;
mod parse_data_url;
mod percent_decode;
mod percent_encode;
mod resolve_url;

View File

@ -9,7 +9,7 @@
mod passing {
use reqwest::Url;
use crate::url;
use monolith::url;
#[test]
fn parse_text_html_base64() {
@ -96,7 +96,7 @@ mod passing {
mod failing {
use reqwest::Url;
use crate::url;
use monolith::url;
#[test]
fn empty_data_url() {

View File

@ -9,7 +9,7 @@
mod passing {
use reqwest::Url;
use crate::url;
use monolith::url;
#[test]
fn basic_httsp_relative() {
@ -211,7 +211,7 @@ mod passing {
mod failing {
use reqwest::Url;
use crate::url;
use monolith::url;
#[test]
fn from_data_url_to_url_with_no_protocol() {

View File

@ -9,7 +9,7 @@
mod passing {
use reqwest::Url;
use crate::utils;
use monolith::utils;
#[test]
fn image_gif87() {
@ -188,7 +188,7 @@ mod passing {
mod failing {
use reqwest::Url;
use crate::utils;
use monolith::utils;
#[test]
fn unknown_media_type() {

View File

@ -0,0 +1,154 @@
// ██████╗ █████╗ ███████╗███████╗██╗███╗ ██╗ ██████╗
// ██╔══██╗██╔══██╗██╔════╝██╔════╝██║████╗ ██║██╔════╝
// ██████╔╝███████║███████╗███████╗██║██╔██╗ ██║██║ ███╗
// ██╔═══╝ ██╔══██║╚════██║╚════██║██║██║╚██╗██║██║ ██║
// ██║ ██║ ██║███████║███████║██║██║ ╚████║╚██████╔╝
// ╚═╝ ╚═╝ ╚═╝╚══════╝╚══════╝╚═╝╚═╝ ╚═══╝ ╚═════╝
#[cfg(test)]
mod passing {
use monolith::utils;
#[test]
fn sub_domain_is_within_dotted_sub_domain() {
assert!(utils::domain_is_within_domain(
"news.ycombinator.com",
".news.ycombinator.com"
));
}
#[test]
fn domain_is_within_dotted_domain() {
assert!(utils::domain_is_within_domain(
"ycombinator.com",
".ycombinator.com"
));
}
#[test]
fn sub_domain_is_within_dotted_domain() {
assert!(utils::domain_is_within_domain(
"news.ycombinator.com",
".ycombinator.com"
));
}
#[test]
fn sub_domain_is_within_dotted_top_level_domain() {
assert!(utils::domain_is_within_domain(
"news.ycombinator.com",
".com"
));
}
#[test]
fn domain_is_within_itself() {
assert!(utils::domain_is_within_domain(
"ycombinator.com",
"ycombinator.com"
));
}
#[test]
fn domain_with_trailing_dot_is_within_itself() {
assert!(utils::domain_is_within_domain(
"ycombinator.com.",
"ycombinator.com"
));
}
#[test]
fn domain_with_trailing_dot_is_within_single_dot() {
assert!(utils::domain_is_within_domain("ycombinator.com.", "."));
}
#[test]
fn domain_matches_single_dot() {
assert!(utils::domain_is_within_domain("ycombinator.com", "."));
}
#[test]
fn dotted_domain_must_be_within_dotted_domain() {
assert!(utils::domain_is_within_domain(
".ycombinator.com",
".ycombinator.com"
));
}
#[test]
fn empty_is_within_dot() {
assert!(utils::domain_is_within_domain("", "."));
}
#[test]
fn both_dots() {
assert!(utils::domain_is_within_domain(".", "."));
}
}
// ███████╗ █████╗ ██╗██╗ ██╗███╗ ██╗ ██████╗
// ██╔════╝██╔══██╗██║██║ ██║████╗ ██║██╔════╝
// █████╗ ███████║██║██║ ██║██╔██╗ ██║██║ ███╗
// ██╔══╝ ██╔══██║██║██║ ██║██║╚██╗██║██║ ██║
// ██║ ██║ ██║██║███████╗██║██║ ╚████║╚██████╔╝
// ╚═╝ ╚═╝ ╚═╝╚═╝╚══════╝╚═╝╚═╝ ╚═══╝ ╚═════╝
#[cfg(test)]
mod failing {
use monolith::utils;
#[test]
fn sub_domain_must_not_be_within_domain() {
assert!(!utils::domain_is_within_domain(
"news.ycombinator.com",
"ycombinator.com"
));
}
#[test]
fn domain_must_not_be_within_top_level_domain() {
assert!(!utils::domain_is_within_domain("ycombinator.com", "com"));
}
#[test]
fn different_domains_must_not_be_within_one_another() {
assert!(!utils::domain_is_within_domain(
"news.ycombinator.com",
"kernel.org"
));
}
#[test]
fn sub_domain_is_not_within_wrong_top_level_domain() {
assert!(!utils::domain_is_within_domain(
"news.ycombinator.com",
"org"
));
}
#[test]
fn dotted_domain_is_not_within_domain() {
assert!(!utils::domain_is_within_domain(
".ycombinator.com",
"ycombinator.com"
));
}
#[test]
fn different_domain_is_not_within_dotted_domain() {
assert!(!utils::domain_is_within_domain(
"www.doodleoptimize.com",
".ycombinator.com"
));
}
#[test]
fn no_domain_can_be_within_empty_domain() {
assert!(!utils::domain_is_within_domain("ycombinator.com", ""));
}
#[test]
fn both_can_not_be_empty() {
assert!(!utils::domain_is_within_domain("", ""));
}
}

View File

@ -7,7 +7,7 @@
#[cfg(test)]
mod passing {
use crate::utils;
use monolith::utils;
#[test]
fn zero() {

View File

@ -1,4 +1,5 @@
mod detect_media_type;
mod domain_is_within_domain;
mod indent;
mod parse_content_type;
mod retrieve_asset;

View File

@ -7,7 +7,7 @@
#[cfg(test)]
mod passing {
use crate::utils;
use monolith::utils;
#[test]
fn text_plain_utf8() {

View File

@ -12,9 +12,9 @@ mod passing {
use std::collections::HashMap;
use std::env;
use crate::opts::Options;
use crate::url;
use crate::utils;
use monolith::opts::Options;
use monolith::url;
use monolith::utils;
#[test]
fn read_data_url() {
@ -63,13 +63,13 @@ mod passing {
cache,
&client,
&Url::parse(&format!(
"{file}{cwd}/src/tests/data/basic/local-file.html",
"{file}{cwd}/tests/_data_/basic/local-file.html",
file = file_url_protocol,
cwd = cwd.to_str().unwrap()
))
.unwrap(),
&Url::parse(&format!(
"{file}{cwd}/src/tests/data/basic/local-script.js",
"{file}{cwd}/tests/_data_/basic/local-script.js",
file = file_url_protocol,
cwd = cwd.to_str().unwrap()
))
@ -84,7 +84,7 @@ mod passing {
assert_eq!(
final_url,
Url::parse(&format!(
"{file}{cwd}/src/tests/data/basic/local-script.js",
"{file}{cwd}/tests/_data_/basic/local-script.js",
file = file_url_protocol,
cwd = cwd.to_str().unwrap()
))
@ -106,8 +106,8 @@ mod failing {
use reqwest::Url;
use std::collections::HashMap;
use crate::opts::Options;
use crate::utils;
use monolith::opts::Options;
use monolith::utils;
#[test]
fn read_local_file_with_data_url_parent() {