Compare commits

..

249 Commits

Author SHA1 Message Date
7383fdd0c0 Make parser more lax, fixes #119
Also make sure we don't print the warning message
20 times, so we avoid some repeated IO.
2021-04-01 17:21:00 +02:00
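
For illustration, a minimal standalone sketch of the once-only-warning idea, assuming an IORef guard (warnOnce and the simulated loop are invented names, not ghcup's code):

  import Control.Monad (unless)
  import Data.IORef (IORef, newIORef, atomicModifyIORef')

  -- Emit a warning at most once, no matter how often the lax parser trips
  -- over the same unparseable entry.
  warnOnce :: IORef Bool -> String -> IO ()
  warnOnce seen msg = do
    alreadyWarned <- atomicModifyIORef' seen (\b -> (True, b))
    unless alreadyWarned (putStrLn ("Warning: " ++ msg))

  main :: IO ()
  main = do
    seen <- newIORef False
    -- simulate 20 parse attempts that would each have printed the warning
    mapM_ (\_ -> warnOnce seen "unknown entry in metadata, ignoring") [1 .. 20 :: Int]
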
f4201d946a Merge branch 'chores' 2021-03-24 18:50:26 +01:00
d5b5f1fddd Chores 2021-03-24 17:56:57 +01:00
910d660732 Update README 2021-03-08 17:59:56 +01:00
ab4ad3a1bb Merge branch 'PR/issue-106' 2021-03-08 17:36:00 +01:00
13b27101f1 Create fish config dir if it doesn't exist
Fixes #106
2021-03-08 16:18:26 +01:00
44b5d0dcff Merge branch 'PR/cabal-updates' 2021-03-08 10:35:06 +01:00
df54b3711f Update cabal.project etc 2021-03-07 20:54:53 +01:00
197288453f Update cabal.project etc 2021-03-07 19:46:44 +01:00
00d5691a13 Post release tasks 2021-03-07 14:50:08 +01:00
561b6684ce Update travis 2021-03-07 12:33:21 +01:00
a13bb3154d Update CHANGELOG 2021-03-07 12:27:04 +01:00
fd9d3faf9f Merge branch 'PR/issue-116' 2021-03-07 12:24:05 +01:00
f6cc467e95 Fix handling of stray versions wrt #116 2021-03-07 12:02:13 +01:00
ef978c1230 Add test case for issue #116 2021-03-07 11:59:45 +01:00
52af598473 Update ghcup in old yamls 2021-03-06 16:02:09 +01:00
2acacd46c2 Bump stack.yaml 2021-03-02 19:37:09 +01:00
4b2f9ddb40 Update project files 2021-03-02 19:22:14 +01:00
0d3dc4eba2 Bump ghcup version 2021-03-02 12:39:24 +01:00
824faa8091 Update CHANGELOG 2021-03-02 12:38:11 +01:00
d10cdfdf57 Merge branch 'PR/issue-115' 2021-03-02 12:34:52 +01:00
5e911793ce Document env variables wrt #95 2021-03-02 10:51:23 +01:00
8c87c9aeb7 Fix error messages and overhaul pretty printing
Fixes #115
2021-03-02 10:35:31 +01:00
9da5998a5c Add some fancy messages 2021-02-26 19:55:33 +01:00
08943dadca Fix validation 2021-02-26 16:06:08 +01:00
7fac7226c5 Update ghcup 2021-02-26 15:38:35 +01:00
9686c8ebf9 Update changelog 2021-02-26 11:53:57 +01:00
4bf96d0ec9 Revert version bump in bootstrap script 2021-02-25 19:19:53 +01:00
34add82bee Merge branch 'PR/issue-114' 2021-02-25 19:18:13 +01:00
f46e7e8c4b Add "ghcup set ghc next" tag wrt #114 2021-02-25 19:10:55 +01:00
3baf254251 Improve tag completer 2021-02-25 16:13:00 +01:00
10ca9ea827 Reformat versionCompleter 2021-02-25 15:52:28 +01:00
4a50c8ecb7 Remove network call on shell completion 2021-02-25 15:46:08 +01:00
47d9766c78 Make sure forFold can properly inline 2021-02-25 15:40:52 +01:00
45ab69960f Merge remote-tracking branch 'origin/merge-requests/70' 2021-02-25 15:36:37 +01:00
d3505d4ee6 Bump version to 0.1.13 2021-02-25 15:33:52 +01:00
9297d1a2f8 Merge branch 'arm' 2021-02-25 14:22:59 +01:00
56feb7c09e Use stages in CI 2021-02-25 14:14:01 +01:00
bede4b8712 Merge branch 'www-fix-silicon-copy' 2021-02-25 13:33:11 +01:00
95c1c55f22 Fix copy button for apple silicon 2021-02-25 13:32:46 +01:00
f547a6eb68 Support ARM/AARCH 2021-02-25 11:53:36 +01:00
Huw campbell
453a29fdf7 Respect the user's configuration settings
Only look up user configuration before doing a search; implement version completion for Cabal and HLS removal
2021-02-25 16:31:40 +11:00
Huw campbell
1a5f0259f4 Just use the cache for commands which refer to locally stored objects.
Setting a version of GHC will fail if provided with a version not installed,
and we don't need to check the most recent list of GHCs available to know
that.
2021-02-25 10:19:16 +11:00
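
The reasoning can be sketched as a simple decision function (Command and needsFreshMetadata are hypothetical names for illustration, not ghcup's types):

  -- Commands that only touch locally installed objects never need a fresh
  -- download of the release metadata; the cached copy is enough.
  data Command = Install String | Set String | Rm String

  needsFreshMetadata :: Command -> Bool
  needsFreshMetadata (Install _) = True   -- may name a release we have not seen yet
  needsFreshMetadata (Set _)     = False  -- can only succeed for an installed version
  needsFreshMetadata (Rm _)      = False  -- likewise operates on local state only

  main :: IO ()
  main = mapM_ (print . needsFreshMetadata)
               [Install "9.0.1", Set "8.10.4", Rm "8.8.4"]
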
Huw campbell
d6fa61e223 Add command line completions for installed and available versions.
When running `ghcup set ghc` and pressing tab, one should be able to
autocomplete the currently installed GHCs we have available.

Add an optparse applicative completer for install, rm, and set commands
which shows tags and versions. For installation, all are shown; while
for remove and set, only those installed are.
2021-02-25 00:42:16 +11:00
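
For context, optparse-applicative lets a parser attach a Completer built with mkCompleter, which the generated shell-completion scripts call to produce candidates. A minimal sketch, with a hard-coded version list standing in for scanning the install directory (names and versions are illustrative, not ghcup's code):

  import Data.List (isPrefixOf)
  import Options.Applicative

  -- Completion candidates; a real implementation would inspect what is
  -- installed, here the list is hard-coded.
  installedGhcCompleter :: Completer
  installedGhcCompleter = mkCompleter $ \prefix ->
    pure (filter (prefix `isPrefixOf`) ["8.8.4", "8.10.4", "9.0.1"])

  versionArg :: Parser String
  versionArg = strArgument (metavar "VERSION" <> completer installedGhcCompleter)

  main :: IO ()
  main = do
    v <- execParser (info (versionArg <**> helper) fullDesc)
    putStrLn ("would set ghc " ++ v)
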
eab82b5d63 Update HLS to 1.0.0 2021-02-24 12:43:04 +01:00
c455b521a9 Fix ghcup-tui 2021-02-24 12:42:36 +01:00
b4f9e12293 Merge remote-tracking branch 'origin/merge-requests/54' 2021-02-24 10:13:46 +01:00
bbd353ea3a Merge branch 'PR/issue-107' 2021-02-23 18:40:04 +01:00
4189c5de69 Update CHANGELOG 2021-02-23 17:11:45 +01:00
dee3218723 Fix item selection with unavailable versions
Fixes #107
2021-02-23 17:10:48 +01:00
3c803a9f58 Merge branch 'PR/issue-104' 2021-02-23 12:52:19 +01:00
a9b0c0fbc9 Allow for dynamic post-install, post-remove and pre-compile msgs 2021-02-23 11:52:38 +01:00
38b6c918f9 Update CHANGELOG 2021-02-21 21:03:12 +01:00
6e584c96c4 Merge branch 'PR/issue-111' 2021-02-21 21:01:59 +01:00
20338f7d14 Alert user if upgraded ghcup is shadowed by old ghcup
Also alerts if the binary is not in PATH at all.

Fixes #111
2021-02-21 19:58:32 +01:00
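
A rough sketch of how such a shadow check can work, using System.Directory and System.FilePath (checkShadowed and the example path are assumptions for illustration, not ghcup's implementation):

  import System.Directory (findExecutable)
  import System.FilePath (equalFilePath, takeDirectory)

  -- If "ghcup" resolves to a different directory than the one we just
  -- upgraded into, an older copy earlier in PATH shadows the new binary;
  -- if it resolves to nothing, the install directory is not in PATH at all.
  checkShadowed :: FilePath -> IO ()
  checkShadowed installDir = do
    found <- findExecutable "ghcup"
    case found of
      Nothing -> putStrLn "warning: ghcup is not in PATH at all"
      Just p
        | takeDirectory p `equalFilePath` installDir -> pure ()
        | otherwise ->
            putStrLn ("warning: " ++ p ++ " shadows the upgraded ghcup in " ++ installDir)

  main :: IO ()
  main = checkShadowed "/home/user/.ghcup/bin"  -- example location only
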
1a995a5d63 Merge branch 'PR/cabal-3.4.0.0' 2021-02-21 16:34:18 +01:00
f964382175 Tighten checks, alpine 32bit bindists are 1st class 2021-02-21 15:37:05 +01:00
0c7f60fae6 Add more alpine 32bit bindists 2021-02-21 15:35:33 +01:00
413e63d1ca Update hasufell.de hosted bindists to webhost.haskell.org 2021-02-21 15:34:55 +01:00
8b000f4e48 Add cabal-3.4.0.0 final release 2021-02-20 23:30:28 +01:00
b0522507be Merge branch 'PR/ubunbu-20.10-update-reqs/110' 2021-02-16 16:40:03 +01:00
d4bcf7021e Merge branch 'PR/fix-109' 2021-02-16 16:26:13 +01:00
48cf0b1f67 Update libffi req for ubuntu groovy
Fixes #110
2021-02-16 14:41:49 +01:00
d82e189c01 Fix failed ghcup upgrade if destination dir doesn't exist
Fixes #109
2021-02-16 14:37:17 +01:00
e5a60d1b9a Merge branch 'ghc-8.10.4' 2021-02-06 22:39:52 +01:00
ff067351cb Add GHC-8.10.4 2021-02-06 14:28:58 +01:00
345712a617 Add freeze/project file for ghc-8.10.3 2021-02-05 11:59:57 +01:00
118dac6907 Merge branch 'bump-GHC-CI' 2021-02-05 11:51:11 +01:00
5ca40caf81 Bump GHC/Cabal in CI 2021-02-05 11:02:45 +01:00
d858187fd4 Merge remote-tracking branch 'origin/merge-requests/61' 2021-02-05 10:49:39 +01:00
bd65517df1 Add changelog for ghc-9.0.1 2021-02-04 23:20:19 +01:00
b1acad6c95 Set 8.10.3 as recommended 2021-02-04 22:59:17 +01:00
a8333281ac Bump to GHC-9.0.1 2021-02-04 22:49:49 +01:00
2fdb08ac00 Merge remote-tracking branch 'origin/merge-requests/59' 2021-01-31 11:19:22 +01:00
Javier Neira
bd4e5a2314 Update haskell-language-server to 0.9.0 2021-01-30 15:36:34 -05:00
34ed317b6b Merge remote-tracking branch 'origin/merge-requests/58' 2021-01-13 10:13:40 +08:00
Enrico Maria De Angelis
14661502ab #103: Rewording of warning message 2021-01-11 07:12:19 +00:00
097754ffdf Merge remote-tracking branch 'origin/merge-requests/57' 2021-01-05 08:24:30 +08:00
amesgen
f26ec6d295 update HLS to 0.8.0 2021-01-04 21:02:38 +01:00
858d430845 Add ghc-8.10.3 for alpine 32bit 2021-01-04 09:51:36 +08:00
5134eccbf8 Update HACKING.md 2021-01-02 16:07:41 +08:00
28b4737758 Merge remote-tracking branch 'origin/merge-requests/56' 2021-01-02 15:57:18 +08:00
amesgen
5c43ff4c9e error if we check nothing 2021-01-02 08:51:57 +01:00
amesgen
53db68e39f minor tarball filter format change 2021-01-02 07:58:08 +01:00
9e628e34dd Merge remote-tracking branch 'origin/merge-requests/55' 2021-01-02 14:27:11 +08:00
amesgen
62d5d53232 filter tool and version instead of URL 2021-01-02 05:53:11 +01:00
amesgen
56569a0698 use regex instead of substring 2021-01-02 05:05:05 +01:00
amesgen
ef44f818d0 add GHC 9.0.1-rc1 2021-01-01 05:50:54 +01:00
amesgen
8944ed6e36 allow to filter tarball validation by a URL substring
also, use nubOrd for linearithmic instead of quadratic complexity
2021-01-01 05:45:58 +01:00
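
The nubOrd point in a nutshell: Data.List.nub needs only Eq and is quadratic, while Data.Containers.ListUtils.nubOrd (containers >= 0.6.0.1) tracks seen elements in a Set and is linearithmic. A small standalone example (the URLs are placeholders):

  import Data.Containers.ListUtils (nubOrd)  -- containers >= 0.6.0.1
  import Data.List (nub)

  main :: IO ()
  main = do
    let urls = concat (replicate 3 ["https://a.example/x.tar.xz", "https://b.example/y.tar.xz"])
    print (nub urls)     -- Eq-based, O(n^2)
    print (nubOrd urls)  -- Ord-based, O(n log n), same result and order
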
51805b27aa Update CHANGELOG 2020-12-30 17:01:45 +08:00
0ec64510b3 Update CHANGELOG 2020-12-30 16:52:46 +08:00
20152443da Fix hash for 8.10.3 src dist 2020-12-30 11:47:26 +08:00
Ron Toland
7c8929fe9f add default install instructions for apple silicon 2020-12-26 06:33:32 -08:00
5617516c93 Merge remote-tracking branch 'origin/merge-requests/52' 2020-12-25 01:26:08 +08:00
f6fe08367d Fix cabal prerelease download URLs wrt #102 2020-12-24 04:46:05 +08:00
a5f02133e2 Merge remote-tracking branch 'origin/merge-requests/53' 2020-12-24 01:57:58 +08:00
Ron Toland
8ed9b4432d Add install cmd for Apple Silicon wrt #101 2020-12-23 06:51:54 -08:00
amesgen
db1d05e8ad add GHC 8.10.3 2020-12-21 00:52:00 +01:00
eae58137c8 Merge remote-tracking branch 'origin/merge-requests/47' 2020-12-20 01:45:49 +08:00
b0f90c096f Fix chmod on executables, wrt #97 2020-12-20 01:27:27 +08:00
e8361c564a Merge remote-tracking branch 'origin/merge-requests/51' 2020-12-19 03:30:40 +08:00
amesgen
54db9c9a92 update HLS to 0.7.1 2020-12-18 15:32:50 +01:00
amesgen
73db341dc8 update HLS to 0.7.0 2020-12-16 00:58:26 +01:00
5fd30b412b Merge remote-tracking branch 'origin/merge-requests/49' 2020-11-28 12:46:16 +01:00
Anton-Latukha
bbe2e87640 CHANGELOG.md: add note about ghcup directory fix 2020-11-28 12:34:58 +02:00
Anton-Latukha
67f59f6895 bootstrap-haskell: fix XDG GHCUP_DIR value 2020-11-28 01:41:43 +02:00
Anton-Latukha
3e2df2e111 bootstrap-haskell: create GHCUP_DIR 2020-11-28 01:27:35 +02:00
824d2149c6 Merge remote-tracking branch 'origin/merge-requests/48' 2020-11-27 20:26:33 +01:00
Anton-Latukha
c86dbe043b bootstrap-haskell: mention the license
Since the script is served separately from the main source code (it can be opened over
https://get-ghcup.haskell.org), mention the script license to the receiver.
2020-11-27 17:06:39 +02:00
Anton-Latukha
8043ac7f51 bootstrap-haskell: provide instructions for the main settings
Provide some basic instructive information to someone who views the script.
2020-11-27 17:04:00 +02:00
Paolo Martini
ead9d31647 Apply NO_COLOR to dimAttributes as well to cover all tui colors 2020-11-26 20:22:32 +01:00
Paolo Martini
a08e624309 Respect NO_COLOR environment variable in list and tui 2020-11-25 11:41:53 +01:00
b20371c3ac Add default values to all XDG_ variables documentation 2020-11-21 16:31:50 +01:00
0589a7cbcc Document XDG_CONFIG_HOME wrt #85 2020-11-21 16:29:26 +01:00
cf48961063 Bump ghcup to 0.1.12 2020-11-21 14:23:37 +01:00
6046582b9c Improve version ranges 2020-11-21 14:05:34 +01:00
82aa6c70ea Allow to encode version ranges for distro versions
Fixes #84
2020-11-21 01:12:15 +01:00
e829bd8235 Fix brick not updating downloads correctly 2020-11-21 00:32:58 +01:00
66f989e691 Fix FromJSONKey instances
This led to a silent Nothing when the parser failed.
2020-11-20 23:18:25 +01:00
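
For context on FromJSONKey: with aeson, map keys are parsed through a FromJSONKeyFunction, and using FromJSONKeyTextParser with an explicit fail surfaces a bad key as a decode error instead of letting it vanish. A standalone sketch with an invented Version type (not ghcup's):

  {-# LANGUAGE OverloadedStrings #-}
  import Data.Aeson (eitherDecode)
  import Data.Aeson.Types (FromJSONKey (..), FromJSONKeyFunction (..))
  import qualified Data.Map.Strict as Map
  import qualified Data.Text as T
  import Text.Read (readMaybe)

  newtype Version = Version [Int] deriving (Eq, Ord, Show)

  -- Parsing map keys through FromJSONKeyTextParser and calling fail makes a
  -- malformed key show up as a decode error.
  instance FromJSONKey Version where
    fromJSONKey = FromJSONKeyTextParser $ \t ->
      case traverse (readMaybe . T.unpack) (T.splitOn "." t) of
        Just ns -> pure (Version ns)
        Nothing -> fail ("unparseable version key: " ++ T.unpack t)

  main :: IO ()
  main = do
    print (eitherDecode "{\"8.10.4\": 1}"        :: Either String (Map.Map Version Int))
    print (eitherDecode "{\"not-a-version\": 1}" :: Either String (Map.Map Version Int))
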
eebb91fbb0 Use extra-doc-files for CHANGELOG.md 2020-11-20 23:16:13 +01:00
1d3e88bdfe Fix disappearing HLS symlinks wrt #91
When installing a new GHC version, the corresponding
HLS symlink of that version may be accidentally removed.

Ooops.
2020-11-20 23:05:37 +01:00
fbb03dee7e Merge remote-tracking branch 'origin/merge-requests/44' into master 2020-11-12 10:49:26 +01:00
amesgen
88e5afb70f update HLS to 0.6.0 2020-11-11 23:19:05 +01:00
67eabfd3af Update CHANGELOG 2020-10-30 22:27:41 +01:00
cd1dd8c29e Merge branch 'PR/82' into master 2020-10-30 22:26:33 +01:00
08ddb591b7 Add toolchain sanity checks wrt #82 2020-10-30 21:07:49 +01:00
3e841b3c68 Merge branch 'settings' into master 2020-10-26 18:25:20 +01:00
53f5a08924 Allow configuring URLSource as well 2020-10-25 14:47:26 +01:00
d368863c3d Improve help output 2020-10-25 11:00:00 +01:00
c76cce5830 Add a --set option to install/compile, fixes #81 2020-10-25 10:54:04 +01:00
4fef93b7b1 Allow to configure ghcup with a yaml config file
Fixes #41
2020-10-25 10:22:45 +01:00
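
A minimal sketch of loading such a config with the yaml package's decodeFileEither (the Settings fields and the config.yaml path are assumptions for illustration, not ghcup's actual schema):

  {-# LANGUAGE DeriveGeneric #-}
  import Data.Aeson (FromJSON)
  import Data.Yaml (decodeFileEither)
  import GHC.Generics (Generic)

  -- Hypothetical settings record; ghcup's real schema is not reproduced here.
  data Settings = Settings
    { cache     :: Bool
    , verbose   :: Bool
    , urlSource :: Maybe String
    } deriving (Show, Generic)

  instance FromJSON Settings

  main :: IO ()
  main = do
    parsed <- decodeFileEither "config.yaml"  -- example path only
    case parsed of
      Left err -> putStrLn ("could not parse config: " ++ show err)
      Right s  -> print (s :: Settings)
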
241dadbeb5 Update to versions-4.0.1 API 2020-10-25 10:22:35 +01:00
12459c2544 Add internal downloader and tui flags to stack 2020-10-25 10:22:35 +01:00
e250d6013f Redo Settings as AppState 2020-10-24 01:07:31 +02:00
0d41d180d6 Merge remote-tracking branch 'origin/merge-requests/40' into master 2020-10-14 11:59:15 +02:00
amesgen
ef251ce17e update to cabal-3.4.0.0-rc4 2020-10-13 20:53:09 +02:00
956e11c3f8 Bump version to 0.1.12 2020-10-13 00:09:35 +02:00
951b0843b2 Update hls to 0.5.1 2020-10-12 23:54:56 +02:00
a4e4080a1b Merge branch 'TUI-improvements' into master 2020-10-12 23:40:26 +02:00
2aa91c5d91 Update golden test 2020-10-12 21:48:06 +02:00
6471b3877f Bump ghcup version in bootstrap-haskell 2020-10-12 21:05:25 +02:00
778ee142d5 Upload golden files as artifacts on failure 2020-10-12 00:10:37 +02:00
1140132a25 Update hie.yaml 2020-10-11 23:40:02 +02:00
c5c299179d Remove text-conversion workaround 2020-10-11 23:39:49 +02:00
0ce4549eb8 Ditch the viewport logic 2020-10-11 23:37:27 +02:00
97d568ddd6 Show new versions in bright white 2020-10-11 21:44:11 +02:00
ea58465240 Expand the selected bar 2020-10-11 21:16:48 +02:00
7afd262b1b Put separators between tools 2020-10-11 21:07:21 +02:00
34ceaa0823 Add brick to stack.yaml 2020-10-11 21:06:58 +02:00
57c34a07f2 Allow to hide old versions of tools in TUI 2020-10-09 23:05:11 +02:00
73d1d97f1f Reverse order of tool list in TUI 2020-10-09 20:25:52 +02:00
f7ed1a4bde Commit ghc-8.8.4 cabal.project files
These are auto-generated from stack and meant for users.
2020-10-05 22:11:05 +02:00
bdd80d0229 Bump stack lts 2020-10-05 22:10:52 +02:00
0238f70c64 Bump hls to 0.5.0 2020-10-05 00:19:35 +02:00
24ff430c45 Add stack.yaml wrt #75 2020-10-01 17:49:23 +02:00
281fb14d4c Rename ghc-9.0.1-alpha1 to 9.0.0.20200925 wrt #73 2020-09-29 23:33:55 +02:00
03bac93929 Improve bootstrap test 2020-09-29 10:57:21 +02:00
1ae0c2a654 Move printf to the right place 2020-09-29 10:52:31 +02:00
8140936bd3 Test bootstrap-haskell wrt #72 2020-09-29 10:23:29 +02:00
8786acf476 Fix BOOTSTRAP_HASKELL_NONINTERACTIVE 2020-09-29 10:12:03 +02:00
c460d4c743 Update bindists for Mint to fedora 2020-09-29 00:22:58 +02:00
5294adf0d7 Fix bootstrap-script 2020-09-28 23:52:23 +02:00
00f3fa35fd Add hls installation to bootstrap-haskell 2020-09-28 23:48:59 +02:00
9c9aa4f9c0 Fix CI 2020-09-28 23:27:15 +02:00
e23a187cc4 Add ghc-9.0.1-alpha1 2020-09-28 23:23:11 +02:00
3e429945dc Also modify .bash_profile on mac 2020-09-28 09:59:35 +02:00
8be2a8eed7 Update CHANGELOG date for 0.1.11 2020-09-23 09:53:44 +02:00
6b65167581 Update yaml files 2020-09-23 00:27:28 +02:00
9d7914e69a Bump ghcupURL 2020-09-22 23:41:19 +02:00
6c62884b24 Update Changelog 2020-09-22 21:31:01 +02:00
965d2a3ba8 Drop 'ghcup compile cabal'
Upstream has discontinued the old bootstrap shell script.
The new python shell script doesn't work like the old one
and is only useful for bootstrapping to a new architecture.

If you miss this feature, consider running:
  cabal install cabal-install

with the appropriate GHC version set (this might need some
experimenting).

This also fixes #64
2020-09-22 21:26:10 +02:00
40a1cc98c6 Drop use of table-layout, thanks to Simon 2020-09-22 21:05:59 +02:00
4c2d4ee6bd Update hls tarballs 2020-09-22 19:23:24 +02:00
9276664465 Merge branch 'hls' into master 2020-09-22 15:23:10 +02:00
a94bcdb92d Remove 9.0.1-alpha1
Ben requested removal, since this wasn't an announced
release.
2020-09-21 14:13:45 +02:00
5da5fabfef Use alpine:3.12 instead of edge 2020-09-21 12:23:14 +02:00
05cc55c52d Improve brick UI 2020-09-21 10:40:06 +02:00
571df1349c Update hie.yaml 2020-09-20 23:09:09 +02:00
cbbb75062c Bump version to 0.1.11 2020-09-20 23:09:09 +02:00
bb7c4205db Allow to install haskell-language-server wrt #65 2020-09-20 23:09:09 +02:00
b2027f1625 Simplify installing GHC from custom bindist wrt #60 2020-09-19 11:52:12 +02:00
65945c87df Update CHANGELOG 2020-09-19 11:51:48 +02:00
081582d3e1 Merge branch 'compile-overwrite' into master 2020-09-18 09:31:14 +02:00
bf240af518 Fix build
https://github.com/cjdev/text-conversions/pull/10
2020-09-17 22:18:03 +02:00
a269131e2d Allow to compile over existing version, fixes #59 2020-09-17 21:21:16 +02:00
59ece98fdc Fix bug in compileGHC cleanup logic 2020-09-17 21:20:38 +02:00
563924ff26 Fix gitlab CI 2020-09-15 22:22:15 +02:00
8ee3f55428 Fix test on i386 2020-09-15 21:59:56 +02:00
93c17607b5 Fix haddock build, fixes #62 2020-09-15 17:44:30 +02:00
8b4c239444 Add changelog entry for cabal-3.4.0.0-rc3 2020-09-15 15:43:07 +02:00
8bef17bf59 Add cabal-3.4.0.0-rc3 2020-09-15 15:38:10 +02:00
a649146a39 Add hspec tests to gitlab CI 2020-09-13 21:13:42 +02:00
9d6a5313ab Add JSON roundtrip specs 2020-09-13 21:10:13 +02:00
de09c950d5 Improve requirements wording, fixes #56 2020-09-13 15:38:51 +02:00
47838b1bd9 Merge branch 'compile-bindist' into master 2020-09-13 13:30:28 +02:00
02b360e2a9 Create bindists when compiling GHC wrt #51 2020-09-12 23:47:12 +02:00
c10ab15e0c Update cabal pre-release to 3.4.0.0-rc2 2020-09-04 10:16:28 +02:00
46f3da1a94 Merge branch 'fix-symlink-support' into master 2020-09-01 21:07:42 +02:00
7ec9d90aab Fix build with libarchive-3.0.0.0 2020-09-01 19:55:48 +02:00
326bf510c9 Fix Error when ~/.ghcup is a valid symlink
Fixes #49
2020-08-31 13:03:12 +02:00
ce3d1f4309 Add GHC-9.0.1-alpha1 2020-08-27 23:41:07 +02:00
b31ba883e4 Add vim integration section to README 2020-08-23 14:07:49 +02:00
e5d1c04616 Fix bootstrap-haskell for fish shell, fixes #48 2020-08-20 15:51:37 +02:00
34ff0ed9cf Fix dlSubDir for GHC-8.10.2 on alpine wrt #47 2020-08-20 15:49:28 +02:00
85bd87d5f3 Clarify 2020-08-19 19:40:59 +02:00
8b274214af Fix typo 2020-08-19 19:38:36 +02:00
069e3102f4 Fix anchor 2020-08-19 19:36:13 +02:00
8623b32721 Add "Install custom bindists" to README 2020-08-19 19:34:04 +02:00
6342e8edf0 Use 8.10.2 in gitlab CI 2020-08-15 23:54:50 +02:00
bbd8f0c84c Add GHC-8.10.2 for alpine 32bit 2020-08-15 23:34:05 +02:00
873c951d6e Refactor chmod +x 2020-08-14 22:27:05 +02:00
d9c864d3c5 Make sure cabal is executable wrt #46 2020-08-14 22:07:39 +02:00
4280d7109a Fix 3.4.0.0-rc1 urls wrt #46 2020-08-14 21:49:01 +02:00
c8855c068f Update version in bootstrap-haskell 2020-08-14 20:36:14 +02:00
90503061e9 Add ghcup-0.1.10 2020-08-14 20:22:27 +02:00
672ebf6426 Bump version 2020-08-14 16:57:15 +02:00
fd76fde23a Add cabal-3.4.0.0-rc1 2020-08-14 16:54:27 +02:00
e24c9a3ffe Show stray cabals, fixes #45 2020-08-14 16:53:32 +02:00
2641d50c21 Update ghcup binaries 2020-08-14 09:42:41 +02:00
202f3ea3ba Fix bug where setting non-installed GHC unsets current one 2020-08-13 20:40:09 +02:00
4f09e3ff7e Update CHANGELOG 2020-08-13 17:01:09 +02:00
1148219130 Fix README 2020-08-12 10:13:18 +02:00
4b47800dfb CHANGELOG fix 2020-08-11 22:38:22 +02:00
e2c4db9132 Rm unneeded note 2020-08-11 22:04:08 +02:00
90af68b211 Pre-release 0.1.9 2020-08-11 21:55:15 +02:00
80603662b4 Merge branch 'cabal-install-3.4.0.0-rc1' 2020-08-11 21:34:45 +02:00
d2c5d4dfd9 Test that we're not missing GHCup alpine 2020-08-11 20:23:15 +02:00
6f1b8b4041 Fix build on 32bit 2020-08-11 19:47:29 +02:00
f63b2bf744 Fix CI 2020-08-11 11:48:42 +02:00
71cb75c170 Update .gitignore 2020-08-10 22:28:02 +02:00
dac64f5718 Make TarDir backwardscompat 2020-08-10 22:28:02 +02:00
27b2d2ac7d Fix cabal.project for cabal-3.4 2020-08-10 22:28:01 +02:00
47142dd376 Test on 32bit 2020-08-10 22:28:01 +02:00
d071a7e51b Avoid duplicate edits to .bashrc etc 2020-08-10 22:28:01 +02:00
5c45884f5f Allow to specify regex for subdir 2020-08-10 22:27:50 +02:00
cafedd73a2 Use Settings to avoid querying dirs every time 2020-08-10 21:52:30 +02:00
7163b77837 Only query directories once 2020-08-10 21:51:31 +02:00
122c54b51e Refactor 2020-08-10 21:51:31 +02:00
b9d7d7d007 Fix licences in module haddock 2020-08-10 21:51:29 +02:00
9050c9792a Improve bootstrap-haskell 2020-08-10 21:51:20 +02:00
aac8f760ad Add xdg support wrt #39 2020-08-10 21:51:19 +02:00
7d334c18f5 Don't stop TUI on subcommand failure 2020-08-10 21:51:19 +02:00
86b0e4b31b Fix cabalSet for pre-release versions 2020-08-10 21:51:19 +02:00
af811f3dbc nub result in getInstalledCabals 2020-08-10 21:51:19 +02:00
d30d2ac8a5 Add cabal-install-3.4.0.0-rc1 2020-08-10 21:51:00 +02:00
86a4b10de6 Merge branch 'yaml' 2020-08-10 08:58:55 +02:00
cfa7049ab9 Merge branch 'CI' 2020-08-09 22:11:48 +02:00
391676e90a Use yaml instead of pesky json 2020-08-09 21:56:11 +02:00
a046f16308 Remove libtinfo compat symlinks 2020-07-21 00:36:31 +02:00
118 changed files with 30054 additions and 21006 deletions

1
.gitignore vendored

@@ -12,3 +12,4 @@ tags
TAGS
/tmp/
.entangled
release/


@@ -1,3 +1,8 @@
stages:
- hlint
- test
- release
variables:
GIT_SSL_NO_VERIFY: "1"
@@ -14,34 +19,53 @@ variables:
- x86_64-linux
variables:
OS: "LINUX"
ARCH: "64"
.alpine:64bit:
image: "alpine:edge"
image: "alpine:3.12"
tags:
- x86_64-linux
variables:
OS: "LINUX"
BIT: "64"
ARCH: "64"
.alpine:32bit:
image: "i386/alpine:edge"
image: "i386/alpine:3.12"
tags:
- x86_64-linux
variables:
OS: "LINUX"
BIT: "32"
ARCH: "32"
.linux:armv7:
image: "arm32v7/fedora"
tags:
- armv7-linux
variables:
OS: "LINUX"
ARCH: "ARM"
.linux:aarch64:
image: "arm64v8/fedora"
tags:
- aarch64-linux
variables:
OS: "LINUX"
ARCH: "ARM64"
.darwin:
tags:
- x86_64-darwin
variables:
OS: "DARWIN"
ARCH: "64"
.freebsd:
tags:
- x86_64-freebsd
variables:
OS: "FREEBSD"
ARCH: "64"
.root_cleanup:
after_script:
@@ -57,7 +81,12 @@ variables:
script:
- ./.gitlab/script/ghcup_version.sh
variables:
JSON_VERSION: "0.0.2"
JSON_VERSION: "0.0.4"
artifacts:
expire_in: 2 week
paths:
- golden
when: on_failure
.test_ghcup_version:linux:
extends:
@@ -66,6 +95,27 @@ variables:
before_script:
- ./.gitlab/before_script/linux/install_deps.sh
.test_ghcup_version:linux32:
extends:
- .test_ghcup_version
- .alpine:32bit
before_script:
- ./.gitlab/before_script/linux/alpine/install_deps.sh
.test_ghcup_version:armv7:
extends:
- .test_ghcup_version
- .linux:armv7
before_script:
- ./.gitlab/before_script/linux/install_deps_manual.sh
.test_ghcup_version:aarch64:
extends:
- .test_ghcup_version
- .linux:aarch64
before_script:
- ./.gitlab/before_script/linux/install_deps_manual.sh
.test_ghcup_version:darwin:
extends:
- .test_ghcup_version
@@ -91,58 +141,131 @@ variables:
- out
only:
- tags
variables:
JSON_VERSION: "0.0.4"
######## stack test ########
test:linux:stack:
stage: test
before_script:
- ./.gitlab/before_script/linux/install_deps_minimal.sh
script:
- ./.gitlab/script/ghcup_stack.sh
extends:
- .debian
needs: []
######## bootstrap test ########
test:linux:bootstrap_script:
stage: test
before_script:
- ./.gitlab/before_script/linux/install_deps_minimal.sh
script:
- ./.gitlab/script/ghcup_bootstrap.sh
variables:
GHC_VERSION: "8.10.4"
CABAL_VERSION: "3.4.0.0"
extends:
- .debian
needs: []
######## linux test ########
test:linux:recommended:
stage: test
extends: .test_ghcup_version:linux
variables:
GHC_VERSION: "8.8.4"
CABAL_VERSION: "3.2.0.0"
GHC_VERSION: "8.10.4"
CABAL_VERSION: "3.4.0.0"
needs: []
test:linux:latest:
stage: test
extends: .test_ghcup_version:linux
variables:
GHC_VERSION: "8.10.1"
CABAL_VERSION: "3.2.0.0"
allow_failure: true
GHC_VERSION: "8.10.4"
CABAL_VERSION: "3.4.0.0"
needs: []
######## linux 32bit test ########
test:linux:recommended:32bit:
stage: test
extends: .test_ghcup_version:linux32
variables:
GHC_VERSION: "8.10.4"
CABAL_VERSION: "3.2.0.0"
needs: []
######## arm tests ########
test:linux:recommended:armv7:
stage: test
extends: .test_ghcup_version:armv7
variables:
GHC_VERSION: "8.10.4"
CABAL_VERSION: "3.4.0.0"
when: manual
needs: []
test:linux:recommended:aarch64:
stage: test
extends: .test_ghcup_version:aarch64
variables:
GHC_VERSION: "8.10.4"
CABAL_VERSION: "3.4.0.0"
when: manual
needs: []
######## darwin test ########
test:mac:recommended:
stage: test
extends: .test_ghcup_version:darwin
variables:
GHC_VERSION: "8.8.4"
CABAL_VERSION: "3.2.0.0"
GHC_VERSION: "8.10.4"
CABAL_VERSION: "3.4.0.0"
needs: []
test:mac:latest:
stage: test
extends: .test_ghcup_version:darwin
variables:
GHC_VERSION: "8.10.1"
CABAL_VERSION: "3.2.0.0"
allow_failure: true
GHC_VERSION: "8.10.4"
CABAL_VERSION: "3.4.0.0"
needs: []
######## freebsd test ########
test:freebsd:recommended:
stage: test
extends: .test_ghcup_version:freebsd
variables:
GHC_VERSION: "8.8.4"
CABAL_VERSION: "3.2.0.0"
GHC_VERSION: "8.10.4"
CABAL_VERSION: "3.4.0.0"
allow_failure: true # freebsd runners are unreliable
when: manual
needs: []
test:freebsd:latest:
stage: test
extends: .test_ghcup_version:freebsd
variables:
GHC_VERSION: "8.10.1"
CABAL_VERSION: "3.2.0.0"
allow_failure: true
GHC_VERSION: "8.10.4"
CABAL_VERSION: "3.4.0.0"
allow_failure: true # freebsd runners are unreliable
when: manual
needs: []
######## linux release ########
release:linux:64bit:
stage: release
needs: ["test:linux:recommended", "test:linux:latest"]
extends:
- .alpine:64bit
- .release_ghcup
@@ -150,11 +273,13 @@ release:linux:64bit:
- ./.gitlab/before_script/linux/alpine/install_deps.sh
variables:
ARTIFACT: "x86_64-linux-ghcup"
GHC_VERSION: "8.8.4"
CABAL_VERSION: "3.2.0.0"
GHC_VERSION: "8.10.4"
CABAL_VERSION: "3.4.0.0"
release:linux:32bit:
stage: release
needs: ["test:linux:recommended:32bit"]
extends:
- .alpine:32bit
- .release_ghcup
@@ -162,13 +287,40 @@ release:linux:32bit:
- ./.gitlab/before_script/linux/alpine/install_deps.sh
variables:
ARTIFACT: "i386-linux-ghcup"
GHC_VERSION: "8.8.4"
GHC_VERSION: "8.10.4"
CABAL_VERSION: "3.2.0.0"
release:linux:armv7:
stage: release
needs: ["test:linux:recommended:armv7"]
extends:
- .linux:armv7
- .release_ghcup
before_script:
- ./.gitlab/before_script/linux/install_deps_manual.sh
variables:
ARTIFACT: "armv7-linux-ghcup"
GHC_VERSION: "8.10.4"
CABAL_VERSION: "3.4.0.0"
release:linux:aarch64:
stage: release
needs: ["test:linux:recommended:aarch64"]
extends:
- .linux:aarch64
- .release_ghcup
before_script:
- ./.gitlab/before_script/linux/install_deps_manual.sh
variables:
ARTIFACT: "aarch64-linux-ghcup"
GHC_VERSION: "8.10.4"
CABAL_VERSION: "3.4.0.0"
######## darwin release ########
release:darwin:
stage: release
needs: ["test:mac:recommended", "test:mac:latest"]
extends:
- .darwin
- .release_ghcup
@@ -177,14 +329,16 @@ release:darwin:
- ./.gitlab/before_script/darwin/install_deps.sh
variables:
ARTIFACT: "x86_64-apple-darwin-ghcup"
GHC_VERSION: "8.8.3"
CABAL_VERSION: "3.2.0.0"
GHC_VERSION: "8.10.4"
CABAL_VERSION: "3.4.0.0"
MACOSX_DEPLOYMENT_TARGET: "10.7"
######## freebsd release ########
release:freebsd:
stage: release
needs: ["test:freebsd:recommended", "test:freebsd:latest"]
extends:
- .freebsd
- .release_ghcup
@@ -193,6 +347,27 @@ release:freebsd:
- ./.gitlab/before_script/freebsd/install_deps.sh
variables:
ARTIFACT: "x86_64-portbld-freebsd-ghcup"
GHC_VERSION: "8.8.3"
CABAL_VERSION: "3.2.0.0"
GHC_VERSION: "8.10.4"
CABAL_VERSION: "3.4.0.0"
######## hlint ########
hlint:
stage: hlint
extends:
- .alpine:64bit
before_script:
- ./.gitlab/before_script/linux/alpine/install_deps.sh
script:
- ./.gitlab/script/hlint.sh
variables:
GHC_VERSION: "8.10.4"
CABAL_VERSION: "3.4.0.0"
JSON_VERSION: "0.0.4"
allow_failure: true
artifacts:
expire_in: 2 week
paths:
- report.html
when: on_failure


@@ -9,8 +9,9 @@ mkdir -p "${TMPDIR}"
curl -sSfL https://downloads.haskell.org/~ghcup/x86_64-apple-darwin-ghcup > ./ghcup-bin
chmod +x ghcup-bin
./ghcup-bin install ${GHC_VERSION}
./ghcup-bin set ${GHC_VERSION}
./ghcup-bin install-cabal ${CABAL_VERSION}
./ghcup-bin upgrade -i -f
./ghcup-bin -s file://$(pwd)/ghcup-${JSON_VERSION}.yaml install ${GHC_VERSION}
./ghcup-bin -s file://$(pwd)/ghcup-${JSON_VERSION}.yaml set ${GHC_VERSION}
./ghcup-bin -s file://$(pwd)/ghcup-${JSON_VERSION}.yaml install-cabal ${CABAL_VERSION}
exit 0


@@ -11,8 +11,9 @@ mkdir -p "${TMPDIR}"
curl -sSfL https://downloads.haskell.org/~ghcup/x86_64-portbld-freebsd-ghcup > ./ghcup-bin
chmod +x ghcup-bin
./ghcup-bin install ${GHC_VERSION}
./ghcup-bin install-cabal ${CABAL_VERSION}
./ghcup-bin set ${GHC_VERSION}
./ghcup-bin upgrade -i -f
./ghcup-bin -s file://$(pwd)/ghcup-${JSON_VERSION}.yaml install ${GHC_VERSION}
./ghcup-bin -s file://$(pwd)/ghcup-${JSON_VERSION}.yaml set ${GHC_VERSION}
./ghcup-bin -s file://$(pwd)/ghcup-${JSON_VERSION}.yaml install-cabal ${CABAL_VERSION}
exit 0


@@ -10,6 +10,9 @@ apk add --no-cache \
curl \
gcc \
g++ \
binutils \
binutils-gold \
bsd-compat-headers \
gmp-dev \
ncurses-dev \
libffi-dev \
@@ -18,18 +21,15 @@ apk add --no-cache \
tar \
perl
ln -sf libncurses.so /usr/lib/libtinfo.so
ln -sf libncursesw.so.6 /usr/lib/libtinfow.so.6
ln -sf libtinfow.so.6 /usr/lib/libtinfow.so
if [ "${BIT}" = "32" ] ; then
if [ "${ARCH}" = "32" ] ; then
curl -sSfL https://downloads.haskell.org/ghcup/i386-linux-ghcup > ./ghcup-bin
else
curl -sSfL https://downloads.haskell.org/ghcup/x86_64-linux-ghcup > ./ghcup-bin
fi
chmod +x ghcup-bin
./ghcup-bin install ${GHC_VERSION}
./ghcup-bin install-cabal ${CABAL_VERSION}
./ghcup-bin upgrade -i -f
./ghcup-bin -s file://$(pwd)/ghcup-${JSON_VERSION}.yaml install ${GHC_VERSION}
./ghcup-bin -s file://$(pwd)/ghcup-${JSON_VERSION}.yaml install-cabal ${CABAL_VERSION}
# utils
apk add --no-cache \
@@ -49,5 +49,3 @@ apk add --no-cache \
xz-dev \
ncurses-static
ln -sf libncursesw.a /usr/lib/libtinfow.a


@@ -12,7 +12,8 @@ sudo apt-get install -y libnuma-dev zlib1g-dev libgmp-dev libgmp10 libssl-dev li
curl -sSfL https://downloads.haskell.org/~ghcup/x86_64-linux-ghcup > ./ghcup-bin
chmod +x ghcup-bin
./ghcup-bin install ${GHC_VERSION}
./ghcup-bin set ${GHC_VERSION}
./ghcup-bin install-cabal ${CABAL_VERSION}
./ghcup-bin upgrade -i -f
./ghcup-bin -s file://$(pwd)/ghcup-${JSON_VERSION}.yaml install ${GHC_VERSION}
./ghcup-bin -s file://$(pwd)/ghcup-${JSON_VERSION}.yaml set ${GHC_VERSION}
./ghcup-bin -s file://$(pwd)/ghcup-${JSON_VERSION}.yaml install-cabal ${CABAL_VERSION}


@@ -0,0 +1,64 @@
#!/bin/sh
set -eux
. "$( cd "$(dirname "$0")" ; pwd -P )/../../ghcup_env"
mkdir -p "${TMPDIR}"
ednf() {
case "${ARCH}" in
"ARM")
sudo dnf -y --forcearch armv7hl "$@"
;;
"ARM64")
sudo dnf -y --forcearch aarch64 "$@"
;;
*) exit 1 ;;
esac
}
ednf update
ednf install gcc gcc-c++ gmp gmp-devel make ncurses ncurses-devel xz xz-devel perl zlib zlib-devel openssl-devel openssl-libs openssl libffi libffi-devel
if [ "${ARCH}" = "ARM64" ] ; then
ednf install numactl numactl-libs numactl-devel
fi
ednf install bash wget curl git tar
ednf install llvm9.0 llvm9.0-devel llvm9.0-libs llvm9.0-static
case "${ARCH}" in
"ARM")
ghc_url=https://downloads.haskell.org/~ghc/${GHC_VERSION}/ghc-${GHC_VERSION}-armv7-deb10-linux.tar.xz
cabal_url=home.smart-cactus.org/~ben/cabal-install-${CABAL_VERSION}-armv7-linux-bootstrapped.tar.xz
;;
"ARM64")
ghc_url=https://downloads.haskell.org/~ghc/${GHC_VERSION}/ghc-${GHC_VERSION}-aarch64-deb10-linux.tar.xz
cabal_url=https://downloads.haskell.org/~cabal/cabal-install-${CABAL_VERSION}/cabal-install-${CABAL_VERSION}-aarch64-ubuntu-18.04.tar.xz
;;
*) exit 1 ;;
esac
mkdir -p "${GHCUP_INSTALL_BASE_PREFIX}"/.ghcup/bin
curl -O "${ghc_url}"
tar -xf ghc-*.tar.*
cd ghc-${GHC_VERSION}
./configure --prefix="${GHCUP_INSTALL_BASE_PREFIX}"/.ghcup/ghc/${GHC_VERSION}
make install
for i in "${GHCUP_INSTALL_BASE_PREFIX}"/.ghcup/ghc/${GHC_VERSION}/bin/*-${GHC_VERSION} ; do
ln -s "${i}" "${GHCUP_INSTALL_BASE_PREFIX}"/.ghcup/bin/${i##*/}
done
for x in "${GHCUP_INSTALL_BASE_PREFIX}"/.ghcup/bin/*-${GHC_VERSION} ; do
ln -s ${x##*/} ${x%-${GHC_VERSION}}
done
cd ..
rm -rf ghc-${GHC_VERSION} ghc-*.tar.*
unset x i
mkdir cabal-install
cd cabal-install
curl -O "${cabal_url}"
tar -xf cabal-install-*
mv cabal "${GHCUP_INSTALL_BASE_PREFIX}"/.ghcup/bin/cabal
cd ..
rm -rf cabal-install


@@ -0,0 +1,10 @@
#!/bin/sh
set -eux
. "$( cd "$(dirname "$0")" ; pwd -P )/../../ghcup_env"
mkdir -p "${TMPDIR}"
sudo apt-get update -y
sudo apt-get install -y libnuma-dev zlib1g-dev libgmp-dev libgmp10 libssl-dev liblzma-dev git wget


@@ -0,0 +1,30 @@
#!/bin/sh
set -eux
. "$( cd "$(dirname "$0")" ; pwd -P )/../ghcup_env"
mkdir -p "$CI_PROJECT_DIR"/.local/bin
ecabal() {
cabal --store-dir="$(pwd)"/.store "$@"
}
eghcup() {
ghcup -v -c -s file://$(pwd)/ghcup-${JSON_VERSION}.yaml "$@"
}
git describe --always
### build
ecabal update
export BOOTSTRAP_HASKELL_NONINTERACTIVE=yes
export BOOTSTRAP_HASKELL_GHC_VERSION=$GHC_VERSION
export BOOTSTRAP_HASKELL_CABAL_VERSION=$CABAL_VERSION
./bootstrap-haskell
[ "$(ghc --numeric-version)" = "${GHC_VERSION}" ]


@@ -16,15 +16,17 @@ git describe
ecabal update
if [ "${OS}" = "LINUX" ] ; then
if [ "${BIT}" = "32" ] ; then
if [ "${ARCH}" = "32" ] ; then
ecabal build -w ghc-${GHC_VERSION} --ghc-options='-split-sections -optl-static' -ftui -ftar
else
elif [ "${ARCH}" = "64" ] ; then
ecabal build -w ghc-${GHC_VERSION} --ghc-options='-split-sections -optl-static' -ftui
else
ecabal build -w ghc-${GHC_VERSION} -ftui
fi
elif [ "${OS}" = "FREEBSD" ] ; then
ecabal build -w ghc-${GHC_VERSION} --ghc-options='-split-sections' --constraint="zlib static" -ftui
ecabal build -w ghc-${GHC_VERSION} --ghc-options='-split-sections' --constraint="zlib +bundled-c-zlib" -ftui
else
ecabal build -w ghc-${GHC_VERSION} --constraint="zlib static" --constraint="lzma static" -ftui
ecabal build -w ghc-${GHC_VERSION} --constraint="zlib +bundled-c-zlib" --constraint="lzma +static" -ftui
fi
mkdir out

21
.gitlab/script/ghcup_stack.sh Executable file

@@ -0,0 +1,21 @@
#!/bin/sh
set -eux
. "$( cd "$(dirname "$0")" ; pwd -P )/../ghcup_env"
mkdir -p "$CI_PROJECT_DIR"/.local/bin
git describe --always
### build
curl -L -O https://get.haskellstack.org/stable/linux-x86_64.tar.gz
tar xf linux-x86_64.tar.gz
cp stack-*-linux-*/stack "$CI_PROJECT_DIR"/.local/bin/stack
chmod +x "$CI_PROJECT_DIR"/.local/bin/stack
mkdir -p "$CI_PROJECT_DIR"/.stack_root
export TAR_OPTIONS=--no-same-owner
stack --allow-different-user --stack-root "$CI_PROJECT_DIR"/.stack_root build
stack --allow-different-user --stack-root "$CI_PROJECT_DIR"/.stack_root test


@@ -11,7 +11,7 @@ ecabal() {
}
eghcup() {
ghcup -v -c -s file://$(pwd)/ghcup-${JSON_VERSION}.json "$@"
ghcup -v -c -s file://$(pwd)/ghcup-${JSON_VERSION}.yaml "$@"
}
git describe --always
@@ -20,16 +20,31 @@ git describe --always
ecabal update
(
cd /tmp
ecabal install -w ghc-${GHC_VERSION} --installdir="$CI_PROJECT_DIR"/.local/bin hspec-discover
)
if [ "${OS}" = "DARWIN" ] ; then
ecabal build -w ghc-${GHC_VERSION} -ftui
ecabal test -w ghc-${GHC_VERSION} -ftui ghcup-test
elif [ "${OS}" = "LINUX" ] ; then
if [ "${ARCH}" = "32" ] ; then
ecabal build -w ghc-${GHC_VERSION} -finternal-downloader -ftui -ftar
ecabal test -w ghc-${GHC_VERSION} -finternal-downloader -ftui -ftar ghcup-test
else
ecabal build -w ghc-${GHC_VERSION} -finternal-downloader -ftui
ecabal test -w ghc-${GHC_VERSION} -finternal-downloader -ftui ghcup-test
fi
else
ecabal build -w ghc-${GHC_VERSION} -finternal-downloader -ftui
ecabal test -w ghc-${GHC_VERSION} -finternal-downloader -ftui ghcup-test
fi
ecabal haddock
ecabal haddock -w ghc-${GHC_VERSION} -ftar
cp "$(ecabal new-exec --enable-tests --verbose=0 --offline sh -- -c 'command -v ghcup')" .
cp "$(ecabal new-exec --enable-tests --verbose=0 --offline sh -- -c 'command -v ghcup-gen')" .
cp "$(ecabal new-exec -w ghc-${GHC_VERSION} --verbose=0 --offline sh -- -c 'command -v ghcup')" .
cp "$(ecabal new-exec -w ghc-${GHC_VERSION} --verbose=0 --offline sh -- -c 'command -v ghcup-gen')" .
cp ./ghcup "$CI_PROJECT_DIR"/.local/bin/ghcup
cp ./ghcup-gen "$CI_PROJECT_DIR"/.local/bin/ghcup-gen
@@ -42,7 +57,7 @@ rm -rf "${GHCUP_INSTALL_BASE_PREFIX}"/.ghcup
### manual cli based testing
ghcup-gen check -f ghcup-${JSON_VERSION}.json
ghcup-gen check -f ghcup-${JSON_VERSION}.yaml
eghcup --numeric-version
@@ -68,21 +83,37 @@ ghci-$(ghc --numeric-version) --version
# test installing new ghc doesn't mess with currently set GHC
# https://gitlab.haskell.org/haskell/ghcup-hs/issues/7
if [ "${OS}" = "LINUX" ] ; then
eghcup --downloader=wget install 8.4.4
eghcup --downloader=wget install 8.10.3
else # test wget a bit
eghcup install 8.4.4
eghcup install 8.10.3
fi
[ "$(ghc --numeric-version)" = "${ghc_ver}" ]
eghcup set 8.4.4
eghcup set 8.4.4
[ "$(ghc --numeric-version)" = "8.4.4" ]
eghcup set 8.10.3
eghcup set 8.10.3
[ "$(ghc --numeric-version)" = "8.10.3" ]
eghcup set ${GHC_VERSION}
[ "$(ghc --numeric-version)" = "${ghc_ver}" ]
eghcup rm 8.4.4
eghcup rm 8.10.3
[ "$(ghc --numeric-version)" = "${ghc_ver}" ]
# install hls
if [ "${OS}" = "DARWIN" ] ; then
eghcup install hls
haskell-language-server-wrapper --version
elif [ "${OS}" = "LINUX" ] ; then
if [ "${ARCH}" = "64" ] ; then
eghcup install hls
haskell-language-server-wrapper --version
fi
fi
eghcup rm $(ghc --numeric-version)
# https://gitlab.haskell.org/haskell/ghcup-hs/-/issues/116
eghcup install cabal -u https://oleg.fi/cabal-install-3.4.0.0-rc4/cabal-install-3.4.0.0-x86_64-ubuntu-16.04.tar.xz 3.4.0.0-rc4
eghcup rm cabal 3.4.0.0-rc4
eghcup upgrade
eghcup upgrade -f

19
.gitlab/script/hlint.sh Executable file

@@ -0,0 +1,19 @@
#!/bin/sh
set -eux
. "$( cd "$(dirname "$0")" ; pwd -P )/../ghcup_env"
mkdir -p "$CI_PROJECT_DIR"/.local/bin
ecabal() {
cabal --store-dir="$(pwd)"/.store "$@"
}
git describe
ecabal update
ecabal install -w ghc-${GHC_VERSION} --installdir="$CI_PROJECT_DIR"/.local/bin hlint
hlint -r lib/ test/

83
.hlint.yaml Normal file

@@ -0,0 +1,83 @@
# HLint configuration file
# https://github.com/ndmitchell/hlint
##########################
# This file contains a template configuration file, which is typically
# placed as .hlint.yaml in the root of your project
# Warnings currently triggered by your code
- ignore: {name: "Redundant bang pattern"}
- ignore: {name: "Use camelCase"}
- ignore: {name: "Use if"}
- ignore: {name: "Use newtype instead of data"}
- ignore: {name: "Use <$>"}
- ignore: {name: "Use mapMaybe"}
- ignore: {name: "Use const"}
- ignore: {name: "Use list comprehension"}
- ignore: {name: "Redundant multi-way if"}
- ignore: {name: "Redundant lambda"}
- ignore: {name: "Avoid lambda"}
- ignore: {name: "Use uncurry"}
- ignore: {name: "Use replicateM"}
- ignore: {name: "Redundant irrefutable pattern"}
# Specify additional command line arguments
#
# - arguments: [--color, --cpp-simple, -XQuasiQuotes]
# Control which extensions/flags/modules/functions can be used
#
# - extensions:
# - default: false # all extension are banned by default
# - name: [PatternGuards, ViewPatterns] # only these listed extensions can be used
# - {name: CPP, within: CrossPlatform} # CPP can only be used in a given module
#
# - flags:
# - {name: -w, within: []} # -w is allowed nowhere
#
# - modules:
# - {name: [Data.Set, Data.HashSet], as: Set} # if you import Data.Set qualified, it must be as 'Set'
# - {name: Control.Arrow, within: []} # Certain modules are banned entirely
#
# - functions:
# - {name: unsafePerformIO, within: []} # unsafePerformIO can only appear in no modules
# Add custom hints for this project
#
# Will suggest replacing "wibbleMany [myvar]" with "wibbleOne myvar"
# - error: {lhs: "wibbleMany [x]", rhs: wibbleOne x}
# The hints are named by the string they display in warning messages.
# For example, if you see a warning starting like
#
# Main.hs:116:51: Warning: Redundant ==
#
# You can refer to that hint with `{name: Redundant ==}` (see below).
# Turn on hints that are off by default
#
# Ban "module X(module X) where", to require a real export list
# - warn: {name: Use explicit module export list}
#
# Replace a $ b $ c with a . b $ c
# - group: {name: dollar, enabled: true}
#
# Generalise map to fmap, ++ to <>
# - group: {name: generalise, enabled: true}
# Ignore some builtin hints
# - ignore: {name: Use let}
# - ignore: {name: Use const, within: SpecialModule} # Only within certain modules
# Define some custom infix operators
# - fixity: infixr 3 ~^#^~
# To generate a suitable file for HLint do:
# $ hlint --default > .hlint.yaml


@@ -2,23 +2,21 @@
set -ex
## install ghc via old ghcup
mkdir -p ~/.ghcup/bin
curl https://gitlab.haskell.org/haskell/ghcup/raw/master/ghcup > ~/.ghcup/bin/ghcup
curl -sSfL https://downloads.haskell.org/~ghcup/x86_64-apple-darwin-ghcup > ~/.ghcup/bin/ghcup
chmod +x ~/.ghcup/bin/ghcup
export PATH="$HOME/.ghcup/bin:$PATH"
ghcup install 8.8.3
ghcup install-cabal 3.2.0.0
ghcup set 8.8.3
ghcup install 8.10.4
ghcup install-cabal 3.4.0.0
ghcup set 8.10.4
## install ghcup
cabal update
cabal build --constraint="zlib static" --constraint="lzma static" -ftui
cabal build --constraint="zlib +static" --constraint="lzma +static" -ftui
cp "$(cabal new-exec --verbose=0 --offline sh -- -c 'command -v ghcup')" .
strip ./ghcup
cp ghcup "./${ARTIFACT}"


@@ -1,173 +0,0 @@
# This Travis job script has been generated by a script via
#
# haskell-ci '-o' '.travis.yml' '--irc-channels=irc.freenode.org#hackage' 'zlib.cabal' '--apt=zlib1g-dev' '--tests-jobs=>=7.4'
#
# For more information, see https://github.com/haskell-CI/haskell-ci
#
# version: 0.9.20200325
#
version: ~> 1.0
language: c
os: linux
dist: xenial
git:
# whether to recursively clone submodules
submodules: false
notifications:
irc:
channels:
- irc.freenode.org#hackage
skip_join: true
template:
- "\x0313zlib\x03/\x0306%{branch}\x03 \x0314%{commit}\x03 %{build_url} %{message}"
cache:
directories:
- $HOME/.cabal/packages
- $HOME/.cabal/store
- $HOME/.hlint
before_cache:
- rm -fv $CABALHOME/packages/hackage.haskell.org/build-reports.log
# remove files that are regenerated by 'cabal update'
- rm -fv $CABALHOME/packages/hackage.haskell.org/00-index.*
- rm -fv $CABALHOME/packages/hackage.haskell.org/*.json
- rm -fv $CABALHOME/packages/hackage.haskell.org/01-index.cache
- rm -fv $CABALHOME/packages/hackage.haskell.org/01-index.tar
- rm -fv $CABALHOME/packages/hackage.haskell.org/01-index.tar.idx
- rm -rfv $CABALHOME/packages/head.hackage
jobs:
include:
- compiler: ghc-8.10.1
addons: {"apt":{"sources":[{"sourceline":"deb http://ppa.launchpad.net/hvr/ghc/ubuntu xenial main","key_url":"https://keyserver.ubuntu.com/pks/lookup?op=get&search=0x063dab2bdc0b3f9fcebc378bff3aeacef6f88286"}],"packages":["ghc-8.10.1","cabal-install-3.2","zlib1g-dev"]}}
os: linux
- compiler: ghc-8.8.3
addons: {"apt":{"sources":[{"sourceline":"deb http://ppa.launchpad.net/hvr/ghc/ubuntu xenial main","key_url":"https://keyserver.ubuntu.com/pks/lookup?op=get&search=0x063dab2bdc0b3f9fcebc378bff3aeacef6f88286"}],"packages":["ghc-8.8.3","cabal-install-3.0","zlib1g-dev"]}}
os: linux
- compiler: ghc-8.6.5
addons: {"apt":{"sources":[{"sourceline":"deb http://ppa.launchpad.net/hvr/ghc/ubuntu xenial main","key_url":"https://keyserver.ubuntu.com/pks/lookup?op=get&search=0x063dab2bdc0b3f9fcebc378bff3aeacef6f88286"}],"packages":["ghc-8.6.5","cabal-install-3.0","zlib1g-dev"]}}
os: linux
- compiler: ghc-8.4.4
addons: {"apt":{"sources":[{"sourceline":"deb http://ppa.launchpad.net/hvr/ghc/ubuntu xenial main","key_url":"https://keyserver.ubuntu.com/pks/lookup?op=get&search=0x063dab2bdc0b3f9fcebc378bff3aeacef6f88286"}],"packages":["ghc-8.4.4","cabal-install-3.0","zlib1g-dev"]}}
os: linux
- compiler: ghc-8.2.2
addons: {"apt":{"sources":[{"sourceline":"deb http://ppa.launchpad.net/hvr/ghc/ubuntu xenial main","key_url":"https://keyserver.ubuntu.com/pks/lookup?op=get&search=0x063dab2bdc0b3f9fcebc378bff3aeacef6f88286"}],"packages":["ghc-8.2.2","cabal-install-3.0","zlib1g-dev"]}}
os: linux
- compiler: ghc-8.0.2
addons: {"apt":{"sources":[{"sourceline":"deb http://ppa.launchpad.net/hvr/ghc/ubuntu xenial main","key_url":"https://keyserver.ubuntu.com/pks/lookup?op=get&search=0x063dab2bdc0b3f9fcebc378bff3aeacef6f88286"}],"packages":["ghc-8.0.2","cabal-install-3.0","zlib1g-dev"]}}
os: linux
- compiler: ghc-8.0.1
addons: {"apt":{"sources":[{"sourceline":"deb http://ppa.launchpad.net/hvr/ghc/ubuntu xenial main","key_url":"https://keyserver.ubuntu.com/pks/lookup?op=get&search=0x063dab2bdc0b3f9fcebc378bff3aeacef6f88286"}],"packages":["ghc-8.0.1","cabal-install-3.0","zlib1g-dev"]}}
os: linux
- compiler: ghc-7.10.3
addons: {"apt":{"sources":[{"sourceline":"deb http://ppa.launchpad.net/hvr/ghc/ubuntu xenial main","key_url":"https://keyserver.ubuntu.com/pks/lookup?op=get&search=0x063dab2bdc0b3f9fcebc378bff3aeacef6f88286"}],"packages":["ghc-7.10.3","cabal-install-3.0","zlib1g-dev"]}}
os: linux
- compiler: ghc-7.8.4
addons: {"apt":{"sources":[{"sourceline":"deb http://ppa.launchpad.net/hvr/ghc/ubuntu xenial main","key_url":"https://keyserver.ubuntu.com/pks/lookup?op=get&search=0x063dab2bdc0b3f9fcebc378bff3aeacef6f88286"}],"packages":["ghc-7.8.4","cabal-install-3.0","zlib1g-dev"]}}
os: linux
- compiler: ghc-7.6.3
addons: {"apt":{"sources":[{"sourceline":"deb http://ppa.launchpad.net/hvr/ghc/ubuntu xenial main","key_url":"https://keyserver.ubuntu.com/pks/lookup?op=get&search=0x063dab2bdc0b3f9fcebc378bff3aeacef6f88286"}],"packages":["ghc-7.6.3","cabal-install-3.0","zlib1g-dev"]}}
os: linux
- compiler: ghc-7.4.2
addons: {"apt":{"sources":[{"sourceline":"deb http://ppa.launchpad.net/hvr/ghc/ubuntu xenial main","key_url":"https://keyserver.ubuntu.com/pks/lookup?op=get&search=0x063dab2bdc0b3f9fcebc378bff3aeacef6f88286"}],"packages":["ghc-7.4.2","cabal-install-3.0","zlib1g-dev"]}}
os: linux
- compiler: ghc-7.2.2
addons: {"apt":{"sources":[{"sourceline":"deb http://ppa.launchpad.net/hvr/ghc/ubuntu xenial main","key_url":"https://keyserver.ubuntu.com/pks/lookup?op=get&search=0x063dab2bdc0b3f9fcebc378bff3aeacef6f88286"}],"packages":["ghc-7.2.2","cabal-install-3.0","zlib1g-dev"]}}
os: linux
- compiler: ghc-7.0.4
addons: {"apt":{"sources":[{"sourceline":"deb http://ppa.launchpad.net/hvr/ghc/ubuntu xenial main","key_url":"https://keyserver.ubuntu.com/pks/lookup?op=get&search=0x063dab2bdc0b3f9fcebc378bff3aeacef6f88286"}],"packages":["ghc-7.0.4","cabal-install-3.0","zlib1g-dev"]}}
os: linux
before_install:
- HC=$(echo "/opt/$CC/bin/ghc" | sed 's/-/\//')
- WITHCOMPILER="-w $HC"
- HADDOCK=$(echo "/opt/$CC/bin/haddock" | sed 's/-/\//')
- HCPKG="$HC-pkg"
- unset CC
- CABAL=/opt/ghc/bin/cabal
- CABALHOME=$HOME/.cabal
- export PATH="$CABALHOME/bin:$PATH"
- TOP=$(pwd)
- "HCNUMVER=$(${HC} --numeric-version|perl -ne '/^(\\d+)\\.(\\d+)\\.(\\d+)(\\.(\\d+))?$/; print(10000 * $1 + 100 * $2 + ($3 == 0 ? $5 != 1 : $3))')"
- echo $HCNUMVER
- CABAL="$CABAL -vnormal+nowrap"
- set -o pipefail
- TEST=--enable-tests
- if [ $HCNUMVER -lt 70400 ] ; then TEST=--disable-tests ; fi
- BENCH=--enable-benchmarks
- HEADHACKAGE=false
- rm -f $CABALHOME/config
- |
echo "verbose: normal +nowrap +markoutput" >> $CABALHOME/config
echo "remote-build-reporting: anonymous" >> $CABALHOME/config
echo "write-ghc-environment-files: always" >> $CABALHOME/config
echo "remote-repo-cache: $CABALHOME/packages" >> $CABALHOME/config
echo "logs-dir: $CABALHOME/logs" >> $CABALHOME/config
echo "world-file: $CABALHOME/world" >> $CABALHOME/config
echo "extra-prog-path: $CABALHOME/bin" >> $CABALHOME/config
echo "symlink-bindir: $CABALHOME/bin" >> $CABALHOME/config
echo "installdir: $CABALHOME/bin" >> $CABALHOME/config
echo "build-summary: $CABALHOME/logs/build.log" >> $CABALHOME/config
echo "store-dir: $CABALHOME/store" >> $CABALHOME/config
echo "install-dirs user" >> $CABALHOME/config
echo " prefix: $CABALHOME" >> $CABALHOME/config
echo "repository hackage.haskell.org" >> $CABALHOME/config
echo " url: http://hackage.haskell.org/" >> $CABALHOME/config
install:
- ${CABAL} --version
- echo "$(${HC} --version) [$(${HC} --print-project-git-commit-id 2> /dev/null || echo '?')]"
- |
echo "program-default-options" >> $CABALHOME/config
echo " ghc-options: $GHCJOBS +RTS -M6G -RTS" >> $CABALHOME/config
- cat $CABALHOME/config
- rm -fv cabal.project cabal.project.local cabal.project.freeze
- travis_retry ${CABAL} v2-update -v
# Generate cabal.project
- rm -rf cabal.project cabal.project.local cabal.project.freeze
- touch cabal.project
- |
echo "packages: ." >> cabal.project
- |
- "for pkg in $($HCPKG list --simple-output); do echo $pkg | sed 's/-[^-]*$//' | (grep -vE -- '^(zlib)$' || true) | sed 's/^/constraints: /' | sed 's/$/ installed/' >> cabal.project.local; done"
- cat cabal.project || true
- cat cabal.project.local || true
- if [ -f "./configure.ac" ]; then (cd "." && autoreconf -i); fi
- ${CABAL} v2-freeze $WITHCOMPILER ${TEST} ${BENCH}
- "cat cabal.project.freeze | sed -E 's/^(constraints: *| *)//' | sed 's/any.//'"
- rm cabal.project.freeze
- travis_wait 40 ${CABAL} v2-build $WITHCOMPILER ${TEST} ${BENCH} --dep -j2 all
- travis_wait 40 ${CABAL} v2-build $WITHCOMPILER --disable-tests --disable-benchmarks --dep -j2 all
script:
- DISTDIR=$(mktemp -d /tmp/dist-test.XXXX)
# Packaging...
- ${CABAL} v2-sdist all
# Unpacking...
- mv dist-newstyle/sdist/*.tar.gz ${DISTDIR}/
- cd ${DISTDIR} || false
- find . -maxdepth 1 -type f -name '*.tar.gz' -exec tar -xvf '{}' \;
- find . -maxdepth 1 -type f -name '*.tar.gz' -exec rm '{}' \;
- PKGDIR_zlib="$(find . -maxdepth 1 -type d -regex '.*/zlib-[0-9.]*')"
# Generate cabal.project
- rm -rf cabal.project cabal.project.local cabal.project.freeze
- touch cabal.project
- |
echo "packages: ${PKGDIR_zlib}" >> cabal.project
- |
- "for pkg in $($HCPKG list --simple-output); do echo $pkg | sed 's/-[^-]*$//' | (grep -vE -- '^(zlib)$' || true) | sed 's/^/constraints: /' | sed 's/$/ installed/' >> cabal.project.local; done"
- cat cabal.project || true
- cat cabal.project.local || true
# Building...
# this builds all libraries and executables (without tests/benchmarks)
- ${CABAL} v2-build $WITHCOMPILER --disable-tests --disable-benchmarks all
# Building with tests and benchmarks...
# build & run tests, build benchmarks
- ${CABAL} v2-build $WITHCOMPILER ${TEST} ${BENCH} all
# Testing...
- if [ $HCNUMVER -ge 70400 ] ; then ${CABAL} v2-test $WITHCOMPILER ${TEST} ${BENCH} all ; fi
# cabal check...
- (cd ${PKGDIR_zlib} && ${CABAL} -vnormal check)
# haddock...
- ${CABAL} v2-haddock $WITHCOMPILER --with-haddock $HADDOCK ${TEST} ${BENCH} all
# Building without installed constraints for packages in global-db...
- rm -f cabal.project.local
- ${CABAL} v2-build $WITHCOMPILER --disable-tests --disable-benchmarks all
# REGENDATA ("0.9.20200325",["-o",".travis.yml","--irc-channels=irc.freenode.org#hackage","zlib.cabal","--apt=zlib1g-dev","--tests-jobs=>=7.4"])
# EOF


@@ -1,128 +0,0 @@
-----------------------------------------------------------------------------
-- |
-- Copyright : (c) 2006-2014 Duncan Coutts
-- License : BSD-style
--
-- Maintainer : duncan@community.haskell.org
--
-- Compression and decompression of data streams in the gzip format.
--
-- The format is described in detail in RFC #1952:
-- <http://www.ietf.org/rfc/rfc1952.txt>
--
-- See also the zlib home page: <http://zlib.net/>
--
-----------------------------------------------------------------------------
module Codec.Compression.GZip (
-- | This module provides pure functions for compressing and decompressing
-- streams of data in the gzip format and represented by lazy 'ByteString's.
-- This makes it easy to use either in memory or with disk or network IO.
--
-- For example a simple gzip compression program is just:
--
-- > import qualified Data.ByteString.Lazy as ByteString
-- > import qualified Codec.Compression.GZip as GZip
-- >
-- > main = ByteString.interact GZip.compress
--
-- Or you could lazily read in and decompress a @.gz@ file using:
--
-- > content <- fmap GZip.decompress (readFile file)
--
-- * Simple compression and decompression
compress,
decompress,
-- * Extended api with control over compression parameters
compressWith,
decompressWith,
CompressParams(..), defaultCompressParams,
DecompressParams(..), defaultDecompressParams,
-- ** The compression parameter types
CompressionLevel(..),
defaultCompression,
noCompression,
bestSpeed,
bestCompression,
compressionLevel,
Method(..),
deflateMethod,
WindowBits(..),
defaultWindowBits,
windowBits,
MemoryLevel(..),
defaultMemoryLevel,
minMemoryLevel,
maxMemoryLevel,
memoryLevel,
CompressionStrategy(..),
defaultStrategy,
filteredStrategy,
huffmanOnlyStrategy,
) where
import Data.ByteString.Lazy (ByteString)
import qualified Codec.Compression.Zlib.Internal as Internal
import Codec.Compression.Zlib.Internal hiding (compress, decompress)
-- | Decompress a stream of data in the gzip format.
--
-- There are a number of errors that can occur. In each case an exception will
-- be thrown. The possible error conditions are:
--
-- * if the stream does not start with a valid gzip header
--
-- * if the compressed stream is corrupted
--
-- * if the compressed stream ends prematurely
--
-- Note that the decompression is performed /lazily/. Errors in the data stream
-- may not be detected until the end of the stream is demanded (since it is
-- only at the end that the final checksum can be checked). If this is
-- important to you, you must make sure to consume the whole decompressed
-- stream before doing any IO action that depends on it.
--
decompress :: ByteString -> ByteString
decompress = decompressWith defaultDecompressParams
-- | Like 'decompress' but with the ability to specify various decompression
-- parameters. Typical usage:
--
-- > decompressWith defaultCompressParams { ... }
--
decompressWith :: DecompressParams -> ByteString -> ByteString
decompressWith = Internal.decompress gzipFormat
-- | Compress a stream of data into the gzip format.
--
-- This uses the default compression parameters. In particular it uses the
-- default compression level which favours a higher compression ratio over
-- compression speed, though it does not use the maximum compression level.
--
-- Use 'compressWith' to adjust the compression level or other compression
-- parameters.
--
compress :: ByteString -> ByteString
compress = compressWith defaultCompressParams
-- | Like 'compress' but with the ability to specify various compression
-- parameters. Typical usage:
--
-- > compressWith defaultCompressParams { ... }
--
-- In particular you can set the compression level:
--
-- > compressWith defaultCompressParams { compressLevel = BestCompression }
--
compressWith :: CompressParams -> ByteString -> ByteString
compressWith = Internal.compress gzipFormat


@@ -1,116 +0,0 @@
-----------------------------------------------------------------------------
-- |
-- Copyright : (c) 2006-2014 Duncan Coutts
-- License : BSD-style
--
-- Maintainer : duncan@community.haskell.org
--
-- Compression and decompression of data streams in the zlib format.
--
-- The format is described in detail in RFC #1950:
-- <http://www.ietf.org/rfc/rfc1950.txt>
--
-- See also the zlib home page: <http://zlib.net/>
--
-----------------------------------------------------------------------------
module Codec.Compression.Zlib (
-- | This module provides pure functions for compressing and decompressing
-- streams of data in the zlib format and represented by lazy 'ByteString's.
-- This makes it easy to use either in memory or with disk or network IO.
-- * Simple compression and decompression
compress,
decompress,
-- * Extended api with control over compression parameters
compressWith,
decompressWith,
CompressParams(..), defaultCompressParams,
DecompressParams(..), defaultDecompressParams,
-- ** The compression parameter types
CompressionLevel(..),
defaultCompression,
noCompression,
bestSpeed,
bestCompression,
compressionLevel,
Method(..),
deflateMethod,
WindowBits(..),
defaultWindowBits,
windowBits,
MemoryLevel(..),
defaultMemoryLevel,
minMemoryLevel,
maxMemoryLevel,
memoryLevel,
CompressionStrategy(..),
defaultStrategy,
filteredStrategy,
huffmanOnlyStrategy,
) where
import Data.ByteString.Lazy (ByteString)
import qualified Codec.Compression.Zlib.Internal as Internal
import Codec.Compression.Zlib.Internal hiding (compress, decompress)
-- | Decompress a stream of data in the zlib format.
--
-- There are a number of errors that can occur. In each case an exception will
-- be thrown. The possible error conditions are:
--
-- * if the stream does not start with a valid gzip header
--
-- * if the compressed stream is corrupted
--
-- * if the compressed stream ends prematurely
--
-- Note that the decompression is performed /lazily/. Errors in the data stream
-- may not be detected until the end of the stream is demanded (since it is
-- only at the end that the final checksum can be checked). If this is
-- important to you, you must make sure to consume the whole decompressed
-- stream before doing any IO action that depends on it.
--
decompress :: ByteString -> ByteString
decompress = decompressWith defaultDecompressParams
-- | Like 'decompress' but with the ability to specify various decompression
-- parameters. Typical usage:
--
-- > decompressWith defaultCompressParams { ... }
--
decompressWith :: DecompressParams -> ByteString -> ByteString
decompressWith = Internal.decompress zlibFormat
-- | Compress a stream of data into the zlib format.
--
-- This uses the default compression parameters. In particular it uses the
-- default compression level which favours a higher compression ratio over
-- compression speed, though it does not use the maximum compression level.
--
-- Use 'compressWith' to adjust the compression level or other compression
-- parameters.
--
compress :: ByteString -> ByteString
compress = compressWith defaultCompressParams
-- | Like 'compress' but with the ability to specify various compression
-- parameters. Typical usage:
--
-- > compressWith defaultCompressParams { ... }
--
-- In particular you can set the compression level:
--
-- > compressWith defaultCompressParams { compressLevel = BestCompression }
--
compressWith :: CompressParams -> ByteString -> ByteString
compressWith = Internal.compress zlibFormat


@@ -1,953 +0,0 @@
{-# LANGUAGE CPP, RankNTypes, DeriveDataTypeable, BangPatterns #-}
#if __GLASGOW_HASKELL__ >= 702
{-# LANGUAGE Trustworthy #-}
#endif
-----------------------------------------------------------------------------
-- |
-- Copyright : (c) 2006-2015 Duncan Coutts
-- License : BSD-style
--
-- Maintainer : duncan@community.haskell.org
--
-- Pure and IO stream based interfaces to lower level zlib wrapper
--
-----------------------------------------------------------------------------
module Codec.Compression.Zlib.Internal (
-- * Pure interface
compress,
decompress,
-- * Monadic incremental interface
-- $incremental-compression
-- ** Using incremental compression
-- $using-incremental-compression
CompressStream(..),
compressST,
compressIO,
foldCompressStream,
foldCompressStreamWithInput,
-- ** Using incremental decompression
-- $using-incremental-decompression
DecompressStream(..),
DecompressError(..),
decompressST,
decompressIO,
foldDecompressStream,
foldDecompressStreamWithInput,
-- * The compression parameter types
CompressParams(..),
defaultCompressParams,
DecompressParams(..),
defaultDecompressParams,
Stream.Format(..),
Stream.gzipFormat,
Stream.zlibFormat,
Stream.rawFormat,
Stream.gzipOrZlibFormat,
Stream.CompressionLevel(..),
Stream.defaultCompression,
Stream.noCompression,
Stream.bestSpeed,
Stream.bestCompression,
Stream.compressionLevel,
Stream.Method(..),
Stream.deflateMethod,
Stream.WindowBits(..),
Stream.defaultWindowBits,
Stream.windowBits,
Stream.MemoryLevel(..),
Stream.defaultMemoryLevel,
Stream.minMemoryLevel,
Stream.maxMemoryLevel,
Stream.memoryLevel,
Stream.CompressionStrategy(..),
Stream.defaultStrategy,
Stream.filteredStrategy,
Stream.huffmanOnlyStrategy,
) where
import Prelude hiding (length)
import Control.Monad (when)
import Control.Exception (Exception, throw, assert)
import Control.Monad.ST.Lazy hiding (stToIO)
import Control.Monad.ST.Strict (stToIO)
#if __GLASGOW_HASKELL__ >= 702
import qualified Control.Monad.ST.Unsafe as Unsafe (unsafeIOToST)
#else
import qualified Control.Monad.ST.Strict as Unsafe (unsafeIOToST)
#endif
import Data.Typeable (Typeable)
import qualified Data.ByteString.Lazy as L
import qualified Data.ByteString.Lazy.Internal as L
import qualified Data.ByteString as S
import qualified Data.ByteString.Internal as S
import Data.Word (Word8)
import GHC.IO (noDuplicate)
import qualified Codec.Compression.Zlib.Stream as Stream
import Codec.Compression.Zlib.Stream (Stream)
-- | The full set of parameters for compression. The defaults are
-- 'defaultCompressParams'.
--
-- The 'compressBufferSize' is the size of the first output buffer containing
-- the compressed data. If you know an approximate upper bound on the size of
-- the compressed data then setting this parameter can save memory. The default
-- compression output buffer size is @16k@. If your estimate is wrong it does
-- not matter too much; the default buffer size will be used for the remaining
-- chunks.
--
data CompressParams = CompressParams {
compressLevel :: !Stream.CompressionLevel,
compressMethod :: !Stream.Method,
compressWindowBits :: !Stream.WindowBits,
compressMemoryLevel :: !Stream.MemoryLevel,
compressStrategy :: !Stream.CompressionStrategy,
compressBufferSize :: !Int,
compressDictionary :: Maybe S.ByteString
} deriving Show
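-- For example, when a rough upper bound on the compressed output size is known
-- in advance, the first output buffer can be sized to match it (a sketch only;
-- the figure of 100000 bytes is purely illustrative):
--
-- > params = defaultCompressParams { compressBufferSize = 100000 }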
-- | The full set of parameters for decompression. The defaults are
-- 'defaultDecompressParams'.
--
-- The 'decompressBufferSize' is the size of the first output buffer,
-- containing the uncompressed data. If you know an exact or approximate upper
-- bound on the size of the decompressed data then setting this parameter can
-- save memory. The default decompression output buffer size is @32k@. If your
-- estimate is wrong it does not matter too much; the default buffer size will
-- be used for the remaining chunks.
--
-- One particular use case for setting the 'decompressBufferSize' is if you
-- know the exact size of the decompressed data and want to produce a strict
-- 'Data.ByteString.ByteString'. The compression and decompression functions
-- use lazy 'Data.ByteString.Lazy.ByteString's but if you set the
-- 'decompressBufferSize' correctly then you can generate a lazy
-- 'Data.ByteString.Lazy.ByteString' with exactly one chunk, which can be
-- converted to a strict 'Data.ByteString.ByteString' in @O(1)@ time using
-- @'Data.ByteString.concat' . 'Data.ByteString.Lazy.toChunks'@.
--
data DecompressParams = DecompressParams {
decompressWindowBits :: !Stream.WindowBits,
decompressBufferSize :: !Int,
decompressDictionary :: Maybe S.ByteString,
decompressAllMembers :: Bool
} deriving Show
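-- A sketch of the single-chunk trick described in the 'decompressBufferSize'
-- notes above, using this module's 'decompress' (the helper name
-- @decompressToStrict@ and the assumption that @exactSize@ really is the
-- decompressed size are illustrative):
--
-- > decompressToStrict :: Int -> L.ByteString -> S.ByteString
-- > decompressToStrict exactSize input =
-- >     S.concat (L.toChunks (decompress Stream.zlibFormat params input))
-- >   where
-- >     params = defaultDecompressParams { decompressBufferSize = exactSize }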
-- | The default set of parameters for compression. This is typically used with
-- the @compressWith@ function with specific parameters overridden.
--
defaultCompressParams :: CompressParams
defaultCompressParams = CompressParams {
compressLevel = Stream.defaultCompression,
compressMethod = Stream.deflateMethod,
compressWindowBits = Stream.defaultWindowBits,
compressMemoryLevel = Stream.defaultMemoryLevel,
compressStrategy = Stream.defaultStrategy,
compressBufferSize = defaultCompressBufferSize,
compressDictionary = Nothing
}
-- | The default set of parameters for decompression. This is typically used with
-- the @decompressWith@ function with specific parameters overridden.
--
defaultDecompressParams :: DecompressParams
defaultDecompressParams = DecompressParams {
decompressWindowBits = Stream.defaultWindowBits,
decompressBufferSize = defaultDecompressBufferSize,
decompressDictionary = Nothing,
decompressAllMembers = True
}
-- | The default chunk sizes for the output of compression and decompression
-- are 16k and 32k respectively (less a small accounting overhead).
--
defaultCompressBufferSize, defaultDecompressBufferSize :: Int
defaultCompressBufferSize = 16 * 1024 - L.chunkOverhead
defaultDecompressBufferSize = 32 * 1024 - L.chunkOverhead
-- | The unfolding of the decompression process, where you provide a sequence
-- of compressed data chunks as input and receive a sequence of uncompressed
-- data chunks as output. The process is incremental, in that the demand for
-- input and provision of output are interleaved.
--
-- To indicate the end of the input supply an empty input chunk. Note that
-- for 'gzipFormat' with the default 'decompressAllMembers' @True@ you will
-- have to do this, as the decompressor will look for any following members.
-- With 'decompressAllMembers' @False@ the decompressor knows when the data
-- ends and will produce 'DecompressStreamEnd' without you having to supply an
-- empty chunk to indicate the end of the input.
--
data DecompressStream m =
DecompressInputRequired {
decompressSupplyInput :: S.ByteString -> m (DecompressStream m)
}
| DecompressOutputAvailable {
decompressOutput :: !S.ByteString,
decompressNext :: m (DecompressStream m)
}
-- | Includes any trailing unconsumed /input/ data.
| DecompressStreamEnd {
decompressUnconsumedInput :: S.ByteString
}
-- | An error code
| DecompressStreamError {
decompressStreamError :: DecompressError
}
-- | The possible error cases when decompressing a stream.
--
-- This can be 'show'n to give a human readable error message.
--
data DecompressError =
-- | The compressed data stream ended prematurely. This may happen if the
-- input data stream was truncated.
TruncatedInput
-- | It is possible to do zlib compression with a custom dictionary. This
-- allows slightly higher compression ratios for short files. However such
-- compressed streams require the same dictionary when decompressing. This
-- error is for when we encounter a compressed stream that needs a
-- dictionary, and it's not provided.
| DictionaryRequired
-- | If the stream requires a dictionary and you provide one with the
-- wrong 'DictionaryHash' then you will get this error.
| DictionaryMismatch
-- | If the compressed data stream is corrupted in any way then you will
-- get this error, for example if the input data just isn't a compressed
-- zlib data stream. In particular if the data checksum turns out to be
-- wrong then you will get all the decompressed data but this error at the
-- end, instead of the normal successful 'DecompressStreamEnd'.
| DataFormatError String
deriving (Eq, Typeable)
instance Show DecompressError where
show TruncatedInput = modprefix "premature end of compressed data stream"
show DictionaryRequired = modprefix "compressed data stream requires custom dictionary"
show DictionaryMismatch = modprefix "given dictionary does not match the expected one"
show (DataFormatError detail) = modprefix ("compressed data stream format error (" ++ detail ++ ")")
modprefix :: ShowS
modprefix = ("Codec.Compression.Zlib: " ++)
instance Exception DecompressError
-- | A fold over the 'DecompressStream' in the given monad.
--
-- One way to look at this is that it runs the stream, using callback functions
-- for the four stream events.
--
foldDecompressStream :: Monad m
=> ((S.ByteString -> m a) -> m a)
-> (S.ByteString -> m a -> m a)
-> (S.ByteString -> m a)
-> (DecompressError -> m a)
-> DecompressStream m -> m a
foldDecompressStream input output end err = fold
where
fold (DecompressInputRequired next) =
input (\x -> next x >>= fold)
fold (DecompressOutputAvailable outchunk next) =
output outchunk (next >>= fold)
fold (DecompressStreamEnd inchunk) = end inchunk
fold (DecompressStreamError derr) = err derr
-- | A variant on 'foldDecompressStream' that is pure rather than operating in a
-- monad and where the input is provided by a lazy 'L.ByteString'. So we only
-- have to deal with the output, end and error parts, making it like a foldr on
-- a list of output chunks.
--
-- For example:
--
-- > toChunks = foldDecompressStreamWithInput (:) [] throw
--
foldDecompressStreamWithInput :: (S.ByteString -> a -> a)
-> (L.ByteString -> a)
-> (DecompressError -> a)
-> (forall s. DecompressStream (ST s))
-> L.ByteString
-> a
foldDecompressStreamWithInput chunk end err = \s lbs ->
runST (fold s (L.toChunks lbs))
where
fold (DecompressInputRequired next) [] =
next S.empty >>= \strm -> fold strm []
fold (DecompressInputRequired next) (inchunk:inchunks) =
next inchunk >>= \s -> fold s inchunks
fold (DecompressOutputAvailable outchunk next) inchunks = do
r <- next >>= \s -> fold s inchunks
return $ chunk outchunk r
fold (DecompressStreamEnd inchunk) inchunks =
return $ end (L.fromChunks (inchunk:inchunks))
fold (DecompressStreamError derr) _ =
return $ err derr
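-- In the same spirit, the stream errors can be collected purely in an 'Either'
-- instead of being thrown (a sketch; the name @decompressEither@ is
-- illustrative, and it yields either the whole output or the first error):
--
-- > decompressEither :: L.ByteString -> Either DecompressError L.ByteString
-- > decompressEither =
-- >   foldDecompressStreamWithInput
-- >     (\chunk -> fmap (L.Chunk chunk))
-- >     (\_unconsumed -> Right L.Empty)
-- >     Left
-- >     (decompressST Stream.zlibFormat defaultDecompressParams)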
-- $incremental-compression
-- The pure 'compress' and 'decompress' functions are streaming in the sense
-- that they can produce output without demanding all input, however they need
-- the input data stream as a lazy 'L.ByteString'. Having the input data
-- stream as a lazy 'L.ByteString' often requires using lazy I\/O which is not
-- appropriate in all circumstances.
--
-- For these cases an incremental interface is more appropriate. This interface
-- allows both incremental input and output. Chunks of input data are supplied
-- one by one (e.g. as they are obtained from an input source like a file or
-- network source). Output is also produced chunk by chunk.
--
-- The incremental input and output is managed via the 'CompressStream' and
-- 'DecompressStream' types. They represent the unfolding of the process of
-- compressing and decompressing. They operate in either the 'ST' or 'IO'
-- monads. They can be lifted into other incremental abstractions like pipes or
-- conduits, or they can be used directly in the following style.
-- $using-incremental-compression
--
-- In a loop:
--
-- * Inspect the status of the stream
--
-- * When it is 'CompressInputRequired' then you should call the action,
-- passing a chunk of input (or 'BS.empty' when no more input is available)
-- to get the next state of the stream and continue the loop.
--
-- * When it is 'CompressOutputAvailable' then do something with the given
-- chunk of output, and call the action to get the next state of the stream
-- and continue the loop.
--
-- * When it is 'CompressStreamEnd' then terminate the loop.
--
-- Note that you cannot stop as soon as you have no more input; you need to
-- carry on until all the output has been collected, i.e. until you get to
-- 'CompressStreamEnd'.
--
-- Here is an example where we get input from one file handle and send the
-- compressed output to another file handle.
--
-- > go :: Handle -> Handle -> CompressStream IO -> IO ()
-- > go inh outh (CompressInputRequired next) = do
-- > inchunk <- BS.hGet inh 4096
-- > go inh outh =<< next inchunk
-- > go inh outh (CompressOutputAvailable outchunk next) = do
-- > BS.hPut outh outchunk
-- > go inh outh =<< next
-- > go _ _ CompressStreamEnd = return ()
--
-- The same can be achieved with 'foldCompressStream':
--
-- > foldCompressStream
-- > (\next -> do inchunk <- BS.hGet inh 4096; next inchunk)
-- > (\outchunk next -> do BS.hPut outh outchunk; next)
-- > (return ())
-- $using-incremental-decompression
--
-- The use of 'DecompressStream' is very similar to 'CompressStream' but with
-- a few differences:
--
-- * There is the extra possibility of a 'DecompressStreamError'
--
-- * There can be extra trailing data after a compressed stream, and the
-- 'DecompressStreamEnd' includes that.
--
-- Otherwise the same loop style applies, and there are fold functions.
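--
-- As a hedged sketch, the decompression analogue of the compression loop above
-- (assuming the same @BS.hGet@\/@BS.hPut@ helpers and 4096-byte reads, plus
-- 'Control.Exception.throwIO'; the names are illustrative):
--
-- > go :: Handle -> Handle -> DecompressStream IO -> IO ()
-- > go inh outh (DecompressInputRequired next) = do
-- >     inchunk <- BS.hGet inh 4096
-- >     go inh outh =<< next inchunk
-- > go inh outh (DecompressOutputAvailable outchunk next) = do
-- >     BS.hPut outh outchunk
-- >     go inh outh =<< next
-- > go _ _ (DecompressStreamEnd _unconsumed) = return ()
-- > go _ _ (DecompressStreamError err) = throwIO err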
-- | The unfolding of the compression process, where you provide a sequence
-- of uncompressed data chunks as input and receive a sequence of compressed
-- data chunks as output. The process is incremental, in that the demand for
-- input and provision of output are interleaved.
--
data CompressStream m =
CompressInputRequired {
compressSupplyInput :: S.ByteString -> m (CompressStream m)
}
| CompressOutputAvailable {
compressOutput :: !S.ByteString,
compressNext :: m (CompressStream m)
}
| CompressStreamEnd
-- | A fold over the 'CompressStream' in the given monad.
--
-- One way to look at this is that it runs the stream, using callback functions
-- for the three stream events.
--
foldCompressStream :: Monad m
=> ((S.ByteString -> m a) -> m a)
-> (S.ByteString -> m a -> m a)
-> m a
-> CompressStream m -> m a
foldCompressStream input output end = fold
where
fold (CompressInputRequired next) =
input (\x -> next x >>= fold)
fold (CompressOutputAvailable outchunk next) =
output outchunk (next >>= fold)
fold CompressStreamEnd =
end
-- | A variant on 'foldCompressStream' that is pure rather than operating in a
-- monad and where the input is provided by a lazy 'L.ByteString'. So we only
-- have to deal with the output and end parts, making it just like a foldr on a
-- list of output chunks.
--
-- For example:
--
-- > toChunks = foldCompressStreamWithInput (:) []
--
foldCompressStreamWithInput :: (S.ByteString -> a -> a)
-> a
-> (forall s. CompressStream (ST s))
-> L.ByteString
-> a
foldCompressStreamWithInput chunk end = \s lbs ->
runST (fold s (L.toChunks lbs))
where
fold (CompressInputRequired next) [] =
next S.empty >>= \strm -> fold strm []
fold (CompressInputRequired next) (inchunk:inchunks) =
next inchunk >>= \s -> fold s inchunks
fold (CompressOutputAvailable outchunk next) inchunks = do
r <- next >>= \s -> fold s inchunks
return $ chunk outchunk r
fold CompressStreamEnd _inchunks =
return end
-- | Compress a data stream provided as a lazy 'L.ByteString'.
--
-- There are no expected error conditions. All input data streams are valid. It
-- is possible for unexpected errors to occur, such as running out of memory,
-- or finding the wrong version of the zlib C library; these are thrown as
-- exceptions.
--
compress :: Stream.Format -> CompressParams -> L.ByteString -> L.ByteString
-- | Incremental compression in the 'ST' monad. Using 'ST' makes it possible
-- to write pure /lazy/ functions while making use of incremental compression.
--
compressST :: Stream.Format -> CompressParams -> CompressStream (ST s)
-- | Incremental compression in the 'IO' monad.
--
compressIO :: Stream.Format -> CompressParams -> CompressStream IO
compress format params = foldCompressStreamWithInput
L.Chunk L.Empty
(compressStreamST format params)
compressST format params = compressStreamST format params
compressIO format params = compressStreamIO format params
compressStream :: Stream.Format -> CompressParams -> S.ByteString
-> Stream (CompressStream Stream)
compressStream format (CompressParams compLevel method bits memLevel
strategy initChunkSize mdict) =
\chunk -> do
Stream.deflateInit format compLevel method bits memLevel strategy
setDictionary mdict
case chunk of
_ | S.null chunk ->
fillBuffers 20 --gzip header is 20 bytes, others even smaller
S.PS inFPtr offset length -> do
Stream.pushInputBuffer inFPtr offset length
fillBuffers initChunkSize
where
-- we flick between two states:
-- * where one or other buffer is empty
-- - in which case we refill one or both
-- * where both buffers are non-empty
-- - in which case we compress until a buffer is empty
fillBuffers :: Int -> Stream (CompressStream Stream)
fillBuffers outChunkSize = do
#ifdef DEBUG
Stream.consistencyCheck
#endif
-- in this state there are two possibilities:
-- * no output buffer space is available
-- - in which case we must make more available
-- * no input buffer is available
-- - in which case we must supply more
inputBufferEmpty <- Stream.inputBufferEmpty
outputBufferFull <- Stream.outputBufferFull
assert (inputBufferEmpty || outputBufferFull) $ return ()
when outputBufferFull $ do
outFPtr <- Stream.unsafeLiftIO (S.mallocByteString outChunkSize)
Stream.pushOutputBuffer outFPtr 0 outChunkSize
if inputBufferEmpty
then return $ CompressInputRequired $ \chunk ->
case chunk of
_ | S.null chunk -> drainBuffers True
S.PS inFPtr offset length -> do
Stream.pushInputBuffer inFPtr offset length
drainBuffers False
else drainBuffers False
drainBuffers :: Bool -> Stream (CompressStream Stream)
drainBuffers lastChunk = do
inputBufferEmpty' <- Stream.inputBufferEmpty
outputBufferFull' <- Stream.outputBufferFull
assert(not outputBufferFull'
&& (lastChunk || not inputBufferEmpty')) $ return ()
-- this invariant guarantees we can always make forward progress
-- and that therefore a BufferError is impossible
let flush = if lastChunk then Stream.Finish else Stream.NoFlush
status <- Stream.deflate flush
case status of
Stream.Ok -> do
outputBufferFull <- Stream.outputBufferFull
if outputBufferFull
then do (outFPtr, offset, length) <- Stream.popOutputBuffer
let chunk = S.PS outFPtr offset length
return $ CompressOutputAvailable chunk $ do
fillBuffers defaultCompressBufferSize
else do fillBuffers defaultCompressBufferSize
Stream.StreamEnd -> do
inputBufferEmpty <- Stream.inputBufferEmpty
assert inputBufferEmpty $ return ()
outputBufferBytesAvailable <- Stream.outputBufferBytesAvailable
if outputBufferBytesAvailable > 0
then do (outFPtr, offset, length) <- Stream.popOutputBuffer
let chunk = S.PS outFPtr offset length
Stream.finalise
return $ CompressOutputAvailable chunk (return CompressStreamEnd)
else do Stream.finalise
return CompressStreamEnd
Stream.Error code msg -> case code of
Stream.BufferError -> fail "BufferError should be impossible!"
Stream.NeedDict _ -> fail "NeedDict is impossible!"
_ -> fail msg
-- Set the custom dictionary, if we were provided with one
-- and if the format supports it (zlib and raw, not gzip).
setDictionary :: Maybe S.ByteString -> Stream ()
setDictionary (Just dict)
| Stream.formatSupportsDictionary format = do
status <- Stream.deflateSetDictionary dict
case status of
Stream.Ok -> return ()
Stream.Error _ msg -> fail msg
_ -> fail "error when setting deflate dictionary"
setDictionary _ = return ()
-- | Decompress a data stream provided as a lazy 'L.ByteString'.
--
-- It will throw an exception if any error is encountered in the input data.
-- If you need more control over error handling then use one of the incremental
-- versions, 'decompressST' or 'decompressIO'.
--
decompress :: Stream.Format -> DecompressParams -> L.ByteString -> L.ByteString
-- | Incremental decompression in the 'ST' monad. Using 'ST' makes it possible
-- to write pure /lazy/ functions while making use of incremental decompression.
--
decompressST :: Stream.Format -> DecompressParams -> DecompressStream (ST s)
-- | Incremental decompression in the 'IO' monad.
--
decompressIO :: Stream.Format -> DecompressParams -> DecompressStream IO
decompress format params = foldDecompressStreamWithInput
L.Chunk (const L.Empty) throw
(decompressStreamST format params)
decompressST format params = decompressStreamST format params
decompressIO format params = decompressStreamIO format params
decompressStream :: Stream.Format -> DecompressParams
-> Bool -> S.ByteString
-> Stream (DecompressStream Stream)
decompressStream format (DecompressParams bits initChunkSize mdict allMembers)
resume =
\chunk -> do
inputBufferEmpty <- Stream.inputBufferEmpty
outputBufferFull <- Stream.outputBufferFull
assert inputBufferEmpty $
if resume then assert (format == Stream.gzipFormat && allMembers) $
Stream.inflateReset
else assert outputBufferFull $
Stream.inflateInit format bits
case chunk of
_ | S.null chunk -> do
-- special case to avoid demanding more input again
-- always an error anyway
when outputBufferFull $ do
let outChunkSize = 1
outFPtr <- Stream.unsafeLiftIO (S.mallocByteString outChunkSize)
Stream.pushOutputBuffer outFPtr 0 outChunkSize
drainBuffers True
S.PS inFPtr offset length -> do
Stream.pushInputBuffer inFPtr offset length
-- Normally we start with no output buffer (so counts as full) but
-- if we're resuming then we'll usually still have output buffer
-- space available
assert (if not resume then outputBufferFull else True) $ return ()
if outputBufferFull
then fillBuffers initChunkSize
else drainBuffers False
where
-- we flick between two states:
-- * where one or other buffer is empty
-- - in which case we refill one or both
-- * where both buffers are non-empty
-- - in which case we decompress until a buffer is empty
fillBuffers :: Int
-> Stream (DecompressStream Stream)
fillBuffers outChunkSize = do
#ifdef DEBUG
Stream.consistencyCheck
#endif
-- in this state there are two possibilities:
-- * no output buffer space is available
-- - in which case we must make more available
-- * no input buffer is available
-- - in which case we must supply more
inputBufferEmpty <- Stream.inputBufferEmpty
outputBufferFull <- Stream.outputBufferFull
assert (inputBufferEmpty || outputBufferFull) $ return ()
when outputBufferFull $ do
outFPtr <- Stream.unsafeLiftIO (S.mallocByteString outChunkSize)
Stream.pushOutputBuffer outFPtr 0 outChunkSize
if inputBufferEmpty
then return $ DecompressInputRequired $ \chunk ->
case chunk of
_ | S.null chunk -> drainBuffers True
S.PS inFPtr offset length -> do
Stream.pushInputBuffer inFPtr offset length
drainBuffers False
else drainBuffers False
drainBuffers :: Bool -> Stream (DecompressStream Stream)
drainBuffers lastChunk = do
inputBufferEmpty' <- Stream.inputBufferEmpty
outputBufferFull' <- Stream.outputBufferFull
assert(not outputBufferFull'
&& (lastChunk || not inputBufferEmpty')) $ return ()
-- this invariant guarantees we can always make forward progress or at
-- least if a BufferError does occur that it must be due to a premature EOF
status <- Stream.inflate Stream.NoFlush
case status of
Stream.Ok -> do
outputBufferFull <- Stream.outputBufferFull
if outputBufferFull
then do (outFPtr, offset, length) <- Stream.popOutputBuffer
let chunk = S.PS outFPtr offset length
return $ DecompressOutputAvailable chunk $ do
fillBuffers defaultDecompressBufferSize
else do fillBuffers defaultDecompressBufferSize
Stream.StreamEnd -> do
-- The decompressor tells us we're done.
-- Note that there may be input bytes still available if the stream is
-- embedded in some other data stream, so we return any trailing data.
inputBufferEmpty <- Stream.inputBufferEmpty
if inputBufferEmpty
then do finish (DecompressStreamEnd S.empty)
else do (inFPtr, offset, length) <- Stream.popRemainingInputBuffer
let inchunk = S.PS inFPtr offset length
finish (DecompressStreamEnd inchunk)
Stream.Error code msg -> case code of
Stream.BufferError -> finish (DecompressStreamError TruncatedInput)
Stream.NeedDict adler -> do
err <- setDictionary adler mdict
case err of
Just streamErr -> finish streamErr
Nothing -> drainBuffers lastChunk
Stream.DataError -> finish (DecompressStreamError (DataFormatError msg))
_ -> fail msg
-- Note even if we end with an error we still try to flush the last chunk if
-- there is one. The user just has to decide what they want to trust.
finish end = do
outputBufferBytesAvailable <- Stream.outputBufferBytesAvailable
if outputBufferBytesAvailable > 0
then do (outFPtr, offset, length) <- Stream.popOutputBuffer
return (DecompressOutputAvailable (S.PS outFPtr offset length) (return end))
else return end
setDictionary :: Stream.DictionaryHash -> Maybe S.ByteString
-> Stream (Maybe (DecompressStream Stream))
setDictionary _adler Nothing =
return $ Just (DecompressStreamError DictionaryRequired)
setDictionary _adler (Just dict) = do
status <- Stream.inflateSetDictionary dict
case status of
Stream.Ok -> return Nothing
Stream.Error Stream.DataError _ ->
return $ Just (DecompressStreamError DictionaryMismatch)
_ -> fail "error when setting inflate dictionary"
------------------------------------------------------------------------------
mkStateST :: ST s (Stream.State s)
mkStateIO :: IO (Stream.State RealWorld)
mkStateST = strictToLazyST Stream.mkState
mkStateIO = stToIO Stream.mkState
runStreamST :: Stream a -> Stream.State s -> ST s (a, Stream.State s)
runStreamIO :: Stream a -> Stream.State RealWorld -> IO (a, Stream.State RealWorld)
runStreamST strm zstate = strictToLazyST (Unsafe.unsafeIOToST noDuplicate >> Stream.runStream strm zstate)
runStreamIO strm zstate = stToIO (Stream.runStream strm zstate)
compressStreamIO :: Stream.Format -> CompressParams -> CompressStream IO
compressStreamIO format params =
CompressInputRequired {
compressSupplyInput = \chunk -> do
zstate <- mkStateIO
let next = compressStream format params
(strm', zstate') <- runStreamIO (next chunk) zstate
return (go strm' zstate')
}
where
go :: CompressStream Stream -> Stream.State RealWorld -> CompressStream IO
go (CompressInputRequired next) zstate =
CompressInputRequired {
compressSupplyInput = \chunk -> do
(strm', zstate') <- runStreamIO (next chunk) zstate
return (go strm' zstate')
}
go (CompressOutputAvailable chunk next) zstate =
CompressOutputAvailable chunk $ do
(strm', zstate') <- runStreamIO next zstate
return (go strm' zstate')
go CompressStreamEnd _ = CompressStreamEnd
compressStreamST :: Stream.Format -> CompressParams -> CompressStream (ST s)
compressStreamST format params =
CompressInputRequired {
compressSupplyInput = \chunk -> do
zstate <- mkStateST
let next = compressStream format params
(strm', zstate') <- runStreamST (next chunk) zstate
return (go strm' zstate')
}
where
go :: CompressStream Stream -> Stream.State s -> CompressStream (ST s)
go (CompressInputRequired next) zstate =
CompressInputRequired {
compressSupplyInput = \chunk -> do
(strm', zstate') <- runStreamST (next chunk) zstate
return (go strm' zstate')
}
go (CompressOutputAvailable chunk next) zstate =
CompressOutputAvailable chunk $ do
(strm', zstate') <- runStreamST next zstate
return (go strm' zstate')
go CompressStreamEnd _ = CompressStreamEnd
decompressStreamIO :: Stream.Format -> DecompressParams -> DecompressStream IO
decompressStreamIO format params =
DecompressInputRequired $ \chunk -> do
zstate <- mkStateIO
let next = decompressStream format params False
(strm', zstate') <- runStreamIO (next chunk) zstate
go strm' zstate' (S.null chunk)
where
go :: DecompressStream Stream -> Stream.State RealWorld -> Bool
-> IO (DecompressStream IO)
go (DecompressInputRequired next) zstate !_ =
return $ DecompressInputRequired $ \chunk -> do
(strm', zstate') <- runStreamIO (next chunk) zstate
go strm' zstate' (S.null chunk)
go (DecompressOutputAvailable chunk next) zstate !eof =
return $ DecompressOutputAvailable chunk $ do
(strm', zstate') <- runStreamIO next zstate
go strm' zstate' eof
go (DecompressStreamEnd unconsumed) zstate !eof
| format == Stream.gzipFormat
, decompressAllMembers params
, not eof = tryFollowingStream unconsumed zstate
| otherwise = finaliseStreamEnd unconsumed zstate
go (DecompressStreamError err) zstate !_ = finaliseStreamError err zstate
tryFollowingStream :: S.ByteString -> Stream.State RealWorld -> IO (DecompressStream IO)
tryFollowingStream chunk zstate = case S.length chunk of
0 -> return $ DecompressInputRequired $ \chunk' -> case S.length chunk' of
0 -> finaliseStreamEnd S.empty zstate
1 | S.head chunk' /= 0x1f
-> finaliseStreamEnd chunk' zstate
1 -> return $ DecompressInputRequired $ \chunk'' -> case S.length chunk'' of
0 -> finaliseStreamEnd chunk' zstate
_ -> checkHeaderSplit (S.head chunk') chunk'' zstate
_ -> checkHeader chunk' zstate
1 -> return $ DecompressInputRequired $ \chunk' -> case S.length chunk' of
0 -> finaliseStreamEnd chunk zstate
_ -> checkHeaderSplit (S.head chunk) chunk' zstate
_ -> checkHeader chunk zstate
checkHeaderSplit :: Word8 -> S.ByteString -> Stream.State RealWorld -> IO (DecompressStream IO)
checkHeaderSplit 0x1f chunk zstate
| S.head chunk == 0x8b = do
let resume = decompressStream format params True (S.pack [0x1f, 0x8b])
if S.length chunk > 1
then do
-- have to handle the remaining data in this chunk
(DecompressInputRequired next, zstate') <- runStreamIO resume zstate
(strm', zstate'') <- runStreamIO (next (S.tail chunk)) zstate'
go strm' zstate'' False
else do
-- subtle special case when the chunk tail is empty
-- yay for QC tests
(strm, zstate') <- runStreamIO resume zstate
go strm zstate' False
checkHeaderSplit byte chunk zstate =
finaliseStreamEnd (S.cons byte chunk) zstate
checkHeader :: S.ByteString -> Stream.State RealWorld -> IO (DecompressStream IO)
checkHeader chunk zstate
| S.index chunk 0 == 0x1f
, S.index chunk 1 == 0x8b = do
let resume = decompressStream format params True chunk
(strm', zstate') <- runStreamIO resume zstate
go strm' zstate' False
checkHeader chunk zstate = finaliseStreamEnd chunk zstate
finaliseStreamEnd unconsumed zstate = do
_ <- runStreamIO Stream.finalise zstate
return (DecompressStreamEnd unconsumed)
finaliseStreamError err zstate = do
_ <- runStreamIO Stream.finalise zstate
return (DecompressStreamError err)
decompressStreamST :: Stream.Format -> DecompressParams -> DecompressStream (ST s)
decompressStreamST format params =
DecompressInputRequired $ \chunk -> do
zstate <- mkStateST
let next = decompressStream format params False
(strm', zstate') <- runStreamST (next chunk) zstate
go strm' zstate' (S.null chunk)
where
go :: DecompressStream Stream -> Stream.State s -> Bool
-> ST s (DecompressStream (ST s))
go (DecompressInputRequired next) zstate !_ =
return $ DecompressInputRequired $ \chunk -> do
(strm', zstate') <- runStreamST (next chunk) zstate
go strm' zstate' (S.null chunk)
go (DecompressOutputAvailable chunk next) zstate !eof =
return $ DecompressOutputAvailable chunk $ do
(strm', zstate') <- runStreamST next zstate
go strm' zstate' eof
go (DecompressStreamEnd unconsumed) zstate !eof
| format == Stream.gzipFormat
, decompressAllMembers params
, not eof = tryFollowingStream unconsumed zstate
| otherwise = finaliseStreamEnd unconsumed zstate
go (DecompressStreamError err) zstate !_ = finaliseStreamError err zstate
tryFollowingStream :: S.ByteString -> Stream.State s -> ST s (DecompressStream (ST s))
tryFollowingStream chunk zstate =
case S.length chunk of
0 -> return $ DecompressInputRequired $ \chunk' -> case S.length chunk' of
0 -> finaliseStreamEnd S.empty zstate
1 | S.head chunk' /= 0x1f
-> finaliseStreamEnd chunk' zstate
1 -> return $ DecompressInputRequired $ \chunk'' -> case S.length chunk'' of
0 -> finaliseStreamEnd chunk' zstate
_ -> checkHeaderSplit (S.head chunk') chunk'' zstate
_ -> checkHeader chunk' zstate
1 -> return $ DecompressInputRequired $ \chunk' -> case S.length chunk' of
0 -> finaliseStreamEnd chunk zstate
_ -> checkHeaderSplit (S.head chunk) chunk' zstate
_ -> checkHeader chunk zstate
checkHeaderSplit :: Word8 -> S.ByteString -> Stream.State s -> ST s (DecompressStream (ST s))
checkHeaderSplit 0x1f chunk zstate
| S.head chunk == 0x8b = do
let resume = decompressStream format params True (S.pack [0x1f, 0x8b])
if S.length chunk > 1
then do
-- have to handle the remaining data in this chunk
(DecompressInputRequired next, zstate') <- runStreamST resume zstate
(strm', zstate'') <- runStreamST (next (S.tail chunk)) zstate'
go strm' zstate'' False
else do
-- subtle special case when the chunk tail is empty
-- yay for QC tests
(strm, zstate') <- runStreamST resume zstate
go strm zstate' False
checkHeaderSplit byte chunk zstate =
finaliseStreamEnd (S.cons byte chunk) zstate
checkHeader :: S.ByteString -> Stream.State s -> ST s (DecompressStream (ST s))
checkHeader chunk zstate
| S.index chunk 0 == 0x1f
, S.index chunk 1 == 0x8b = do
let resume = decompressStream format params True chunk
(strm', zstate') <- runStreamST resume zstate
go strm' zstate' False
checkHeader chunk zstate = finaliseStreamEnd chunk zstate
finaliseStreamEnd unconsumed zstate = do
_ <- runStreamST Stream.finalise zstate
return (DecompressStreamEnd unconsumed)
finaliseStreamError err zstate = do
_ <- runStreamST Stream.finalise zstate
return (DecompressStreamError err)


@@ -1,68 +0,0 @@
-----------------------------------------------------------------------------
-- |
-- Copyright : (c) 2006-2014 Duncan Coutts
-- License : BSD-style
--
-- Maintainer : duncan@community.haskell.org
--
-- Compression and decompression of data streams in the raw deflate format.
--
-- The format is described in detail in RFC #1951:
-- <http://www.ietf.org/rfc/rfc1951.txt>
--
-- See also the zlib home page: <http://zlib.net/>
--
-----------------------------------------------------------------------------
module Codec.Compression.Zlib.Raw (
-- * Simple compression and decompression
compress,
decompress,
-- * Extended API with control over compression parameters
compressWith,
decompressWith,
CompressParams(..), defaultCompressParams,
DecompressParams(..), defaultDecompressParams,
-- ** The compression parameter types
CompressionLevel(..),
defaultCompression,
noCompression,
bestSpeed,
bestCompression,
compressionLevel,
Method(..),
deflateMethod,
WindowBits(..),
defaultWindowBits,
windowBits,
MemoryLevel(..),
defaultMemoryLevel,
minMemoryLevel,
maxMemoryLevel,
memoryLevel,
CompressionStrategy(..),
defaultStrategy,
filteredStrategy,
huffmanOnlyStrategy,
) where
import Data.ByteString.Lazy (ByteString)
import qualified Codec.Compression.Zlib.Internal as Internal
import Codec.Compression.Zlib.Internal hiding (compress, decompress)
decompress :: ByteString -> ByteString
decompress = decompressWith defaultDecompressParams
decompressWith :: DecompressParams -> ByteString -> ByteString
decompressWith = Internal.decompress rawFormat
compress :: ByteString -> ByteString
compress = compressWith defaultCompressParams
compressWith :: CompressParams -> ByteString -> ByteString
compressWith = Internal.compress rawFormat
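-- A minimal round-trip sketch for the raw deflate format, mirroring the zlib
-- example earlier (the qualified imports and the helper name are illustrative):
--
-- > import qualified Data.ByteString.Lazy as BL
-- > import qualified Codec.Compression.Zlib.Raw as Raw
-- >
-- > rawRoundTrip :: BL.ByteString -> Bool
-- > rawRoundTrip bs = Raw.decompress (Raw.compress bs) == bs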

File diff suppressed because it is too large

3rdparty/zlib/LICENSE vendored

@@ -1,24 +0,0 @@
Copyright (c) 2006-2016, Duncan Coutts
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are met:
1. Redistributions of source code must retain the above copyright notice,
this list of conditions and the following disclaimer.
2. Redistributions in binary form must reproduce the above copyright
notice, this list of conditions and the following disclaimer in the
documentation and/or other materials provided with the distribution.
3. This clause is intentionally left blank.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
POSSIBILITY OF SUCH DAMAGE.


@@ -1,2 +0,0 @@
import Distribution.Simple
main = defaultMain

3rdparty/zlib/TODO vendored

@@ -1,3 +0,0 @@
* Currently will not support bytestring chunks bigger than 4Gb
because the in_avail is only a C int. So chunks bigger than that
need to be broken up into smaller ones.


@@ -1,179 +0,0 @@
/* adler32.c -- compute the Adler-32 checksum of a data stream
* Copyright (C) 1995-2011 Mark Adler
* For conditions of distribution and use, see copyright notice in zlib.h
*/
/* @(#) $Id$ */
#include "zutil.h"
#define local static
local uLong adler32_combine_ OF((uLong adler1, uLong adler2, z_off64_t len2));
#define BASE 65521 /* largest prime smaller than 65536 */
#define NMAX 5552
/* NMAX is the largest n such that 255n(n+1)/2 + (n+1)(BASE-1) <= 2^32-1 */
#define DO1(buf,i) {adler += (buf)[i]; sum2 += adler;}
#define DO2(buf,i) DO1(buf,i); DO1(buf,i+1);
#define DO4(buf,i) DO2(buf,i); DO2(buf,i+2);
#define DO8(buf,i) DO4(buf,i); DO4(buf,i+4);
#define DO16(buf) DO8(buf,0); DO8(buf,8);
/* use NO_DIVIDE if your processor does not do division in hardware --
try it both ways to see which is faster */
#ifdef NO_DIVIDE
/* note that this assumes BASE is 65521, where 65536 % 65521 == 15
(thank you to John Reiser for pointing this out) */
# define CHOP(a) \
do { \
unsigned long tmp = a >> 16; \
a &= 0xffffUL; \
a += (tmp << 4) - tmp; \
} while (0)
# define MOD28(a) \
do { \
CHOP(a); \
if (a >= BASE) a -= BASE; \
} while (0)
# define MOD(a) \
do { \
CHOP(a); \
MOD28(a); \
} while (0)
# define MOD63(a) \
do { /* this assumes a is not negative */ \
z_off64_t tmp = a >> 32; \
a &= 0xffffffffL; \
a += (tmp << 8) - (tmp << 5) + tmp; \
tmp = a >> 16; \
a &= 0xffffL; \
a += (tmp << 4) - tmp; \
tmp = a >> 16; \
a &= 0xffffL; \
a += (tmp << 4) - tmp; \
if (a >= BASE) a -= BASE; \
} while (0)
#else
# define MOD(a) a %= BASE
# define MOD28(a) a %= BASE
# define MOD63(a) a %= BASE
#endif
/* ========================================================================= */
uLong ZEXPORT adler32(adler, buf, len)
uLong adler;
const Bytef *buf;
uInt len;
{
unsigned long sum2;
unsigned n;
/* split Adler-32 into component sums */
sum2 = (adler >> 16) & 0xffff;
adler &= 0xffff;
/* in case user likes doing a byte at a time, keep it fast */
if (len == 1) {
adler += buf[0];
if (adler >= BASE)
adler -= BASE;
sum2 += adler;
if (sum2 >= BASE)
sum2 -= BASE;
return adler | (sum2 << 16);
}
/* initial Adler-32 value (deferred check for len == 1 speed) */
if (buf == Z_NULL)
return 1L;
/* in case short lengths are provided, keep it somewhat fast */
if (len < 16) {
while (len--) {
adler += *buf++;
sum2 += adler;
}
if (adler >= BASE)
adler -= BASE;
MOD28(sum2); /* only added so many BASE's */
return adler | (sum2 << 16);
}
/* do length NMAX blocks -- requires just one modulo operation */
while (len >= NMAX) {
len -= NMAX;
n = NMAX / 16; /* NMAX is divisible by 16 */
do {
DO16(buf); /* 16 sums unrolled */
buf += 16;
} while (--n);
MOD(adler);
MOD(sum2);
}
/* do remaining bytes (less than NMAX, still just one modulo) */
if (len) { /* avoid modulos if none remaining */
while (len >= 16) {
len -= 16;
DO16(buf);
buf += 16;
}
while (len--) {
adler += *buf++;
sum2 += adler;
}
MOD(adler);
MOD(sum2);
}
/* return recombined sums */
return adler | (sum2 << 16);
}
/* ========================================================================= */
local uLong adler32_combine_(adler1, adler2, len2)
uLong adler1;
uLong adler2;
z_off64_t len2;
{
unsigned long sum1;
unsigned long sum2;
unsigned rem;
/* for negative len, return invalid adler32 as a clue for debugging */
if (len2 < 0)
return 0xffffffffUL;
/* the derivation of this formula is left as an exercise for the reader */
MOD63(len2); /* assumes len2 >= 0 */
rem = (unsigned)len2;
sum1 = adler1 & 0xffff;
sum2 = rem * sum1;
MOD(sum2);
sum1 += (adler2 & 0xffff) + BASE - 1;
sum2 += ((adler1 >> 16) & 0xffff) + ((adler2 >> 16) & 0xffff) + BASE - rem;
if (sum1 >= BASE) sum1 -= BASE;
if (sum1 >= BASE) sum1 -= BASE;
if (sum2 >= (BASE << 1)) sum2 -= (BASE << 1);
if (sum2 >= BASE) sum2 -= BASE;
return sum1 | (sum2 << 16);
}
/* ========================================================================= */
uLong ZEXPORT adler32_combine(adler1, adler2, len2)
uLong adler1;
uLong adler2;
z_off_t len2;
{
return adler32_combine_(adler1, adler2, len2);
}
uLong ZEXPORT adler32_combine64(adler1, adler2, len2)
uLong adler1;
uLong adler2;
z_off64_t len2;
{
return adler32_combine_(adler1, adler2, len2);
}


@@ -1,80 +0,0 @@
/* compress.c -- compress a memory buffer
* Copyright (C) 1995-2005 Jean-loup Gailly.
* For conditions of distribution and use, see copyright notice in zlib.h
*/
/* @(#) $Id$ */
#define ZLIB_INTERNAL
#include "zlib.h"
/* ===========================================================================
Compresses the source buffer into the destination buffer. The level
parameter has the same meaning as in deflateInit. sourceLen is the byte
length of the source buffer. Upon entry, destLen is the total size of the
destination buffer, which must be at least 0.1% larger than sourceLen plus
12 bytes. Upon exit, destLen is the actual size of the compressed buffer.
compress2 returns Z_OK if success, Z_MEM_ERROR if there was not enough
memory, Z_BUF_ERROR if there was not enough room in the output buffer,
Z_STREAM_ERROR if the level parameter is invalid.
*/
int ZEXPORT compress2 (dest, destLen, source, sourceLen, level)
Bytef *dest;
uLongf *destLen;
const Bytef *source;
uLong sourceLen;
int level;
{
z_stream stream;
int err;
stream.next_in = (z_const Bytef *)source;
stream.avail_in = (uInt)sourceLen;
#ifdef MAXSEG_64K
/* Check for source > 64K on 16-bit machine: */
if ((uLong)stream.avail_in != sourceLen) return Z_BUF_ERROR;
#endif
stream.next_out = dest;
stream.avail_out = (uInt)*destLen;
if ((uLong)stream.avail_out != *destLen) return Z_BUF_ERROR;
stream.zalloc = (alloc_func)0;
stream.zfree = (free_func)0;
stream.opaque = (voidpf)0;
err = deflateInit(&stream, level);
if (err != Z_OK) return err;
err = deflate(&stream, Z_FINISH);
if (err != Z_STREAM_END) {
deflateEnd(&stream);
return err == Z_OK ? Z_BUF_ERROR : err;
}
*destLen = stream.total_out;
err = deflateEnd(&stream);
return err;
}
/* ===========================================================================
*/
int ZEXPORT compress (dest, destLen, source, sourceLen)
Bytef *dest;
uLongf *destLen;
const Bytef *source;
uLong sourceLen;
{
return compress2(dest, destLen, source, sourceLen, Z_DEFAULT_COMPRESSION);
}
/* ===========================================================================
If the default memLevel or windowBits for deflateInit() is changed, then
this function needs to be updated.
*/
uLong ZEXPORT compressBound (sourceLen)
uLong sourceLen;
{
return sourceLen + (sourceLen >> 12) + (sourceLen >> 14) +
(sourceLen >> 25) + 13;
}


@@ -1,425 +0,0 @@
/* crc32.c -- compute the CRC-32 of a data stream
* Copyright (C) 1995-2006, 2010, 2011, 2012 Mark Adler
* For conditions of distribution and use, see copyright notice in zlib.h
*
* Thanks to Rodney Brown <rbrown64@csc.com.au> for his contribution of faster
* CRC methods: exclusive-oring 32 bits of data at a time, and pre-computing
* tables for updating the shift register in one step with three exclusive-ors
* instead of four steps with four exclusive-ors. This results in about a
* factor of two increase in speed on a Power PC G4 (PPC7455) using gcc -O3.
*/
/* @(#) $Id$ */
/*
Note on the use of DYNAMIC_CRC_TABLE: there is no mutex or semaphore
protection on the static variables used to control the first-use generation
of the crc tables. Therefore, if you #define DYNAMIC_CRC_TABLE, you should
first call get_crc_table() to initialize the tables before allowing more than
one thread to use crc32().
DYNAMIC_CRC_TABLE and MAKECRCH can be #defined to write out crc32.h.
*/
#ifdef MAKECRCH
# include <stdio.h>
# ifndef DYNAMIC_CRC_TABLE
# define DYNAMIC_CRC_TABLE
# endif /* !DYNAMIC_CRC_TABLE */
#endif /* MAKECRCH */
#include "zutil.h" /* for STDC and FAR definitions */
#define local static
/* Definitions for doing the crc four data bytes at a time. */
#if !defined(NOBYFOUR) && defined(Z_U4)
# define BYFOUR
#endif
#ifdef BYFOUR
local unsigned long crc32_little OF((unsigned long,
const unsigned char FAR *, unsigned));
local unsigned long crc32_big OF((unsigned long,
const unsigned char FAR *, unsigned));
# define TBLS 8
#else
# define TBLS 1
#endif /* BYFOUR */
/* Local functions for crc concatenation */
local unsigned long gf2_matrix_times OF((unsigned long *mat,
unsigned long vec));
local void gf2_matrix_square OF((unsigned long *square, unsigned long *mat));
local uLong crc32_combine_ OF((uLong crc1, uLong crc2, z_off64_t len2));
#ifdef DYNAMIC_CRC_TABLE
local volatile int crc_table_empty = 1;
local z_crc_t FAR crc_table[TBLS][256];
local void make_crc_table OF((void));
#ifdef MAKECRCH
local void write_table OF((FILE *, const z_crc_t FAR *));
#endif /* MAKECRCH */
/*
Generate tables for a byte-wise 32-bit CRC calculation on the polynomial:
x^32+x^26+x^23+x^22+x^16+x^12+x^11+x^10+x^8+x^7+x^5+x^4+x^2+x+1.
Polynomials over GF(2) are represented in binary, one bit per coefficient,
with the lowest powers in the most significant bit. Then adding polynomials
is just exclusive-or, and multiplying a polynomial by x is a right shift by
one. If we call the above polynomial p, and represent a byte as the
polynomial q, also with the lowest power in the most significant bit (so the
byte 0xb1 is the polynomial x^7+x^3+x+1), then the CRC is (q*x^32) mod p,
where a mod b means the remainder after dividing a by b.
This calculation is done using the shift-register method of multiplying and
taking the remainder. The register is initialized to zero, and for each
incoming bit, x^32 is added mod p to the register if the bit is a one (where
x^32 mod p is p+x^32 = x^26+...+1), and the register is multiplied mod p by
x (which is shifting right by one and adding x^32 mod p if the bit shifted
out is a one). We start with the highest power (least significant bit) of
q and repeat for all eight bits of q.
The first table is simply the CRC of all possible eight bit values. This is
all the information needed to generate CRCs on data a byte at a time for all
combinations of CRC register values and incoming bytes. The remaining tables
allow for word-at-a-time CRC calculation for both big-endian and little-
endian machines, where a word is four bytes.
*/
local void make_crc_table()
{
z_crc_t c;
int n, k;
z_crc_t poly; /* polynomial exclusive-or pattern */
/* terms of polynomial defining this crc (except x^32): */
static volatile int first = 1; /* flag to limit concurrent making */
static const unsigned char p[] = {0,1,2,4,5,7,8,10,11,12,16,22,23,26};
/* See if another task is already doing this (not thread-safe, but better
than nothing -- significantly reduces duration of vulnerability in
case the advice about DYNAMIC_CRC_TABLE is ignored) */
if (first) {
first = 0;
/* make exclusive-or pattern from polynomial (0xedb88320UL) */
poly = 0;
for (n = 0; n < (int)(sizeof(p)/sizeof(unsigned char)); n++)
poly |= (z_crc_t)1 << (31 - p[n]);
/* generate a crc for every 8-bit value */
for (n = 0; n < 256; n++) {
c = (z_crc_t)n;
for (k = 0; k < 8; k++)
c = c & 1 ? poly ^ (c >> 1) : c >> 1;
crc_table[0][n] = c;
}
#ifdef BYFOUR
/* generate crc for each value followed by one, two, and three zeros,
and then the byte reversal of those as well as the first table */
for (n = 0; n < 256; n++) {
c = crc_table[0][n];
crc_table[4][n] = ZSWAP32(c);
for (k = 1; k < 4; k++) {
c = crc_table[0][c & 0xff] ^ (c >> 8);
crc_table[k][n] = c;
crc_table[k + 4][n] = ZSWAP32(c);
}
}
#endif /* BYFOUR */
crc_table_empty = 0;
}
else { /* not first */
/* wait for the other guy to finish (not efficient, but rare) */
while (crc_table_empty)
;
}
#ifdef MAKECRCH
/* write out CRC tables to crc32.h */
{
FILE *out;
out = fopen("crc32.h", "w");
if (out == NULL) return;
fprintf(out, "/* crc32.h -- tables for rapid CRC calculation\n");
fprintf(out, " * Generated automatically by crc32.c\n */\n\n");
fprintf(out, "local const z_crc_t FAR ");
fprintf(out, "crc_table[TBLS][256] =\n{\n {\n");
write_table(out, crc_table[0]);
# ifdef BYFOUR
fprintf(out, "#ifdef BYFOUR\n");
for (k = 1; k < 8; k++) {
fprintf(out, " },\n {\n");
write_table(out, crc_table[k]);
}
fprintf(out, "#endif\n");
# endif /* BYFOUR */
fprintf(out, " }\n};\n");
fclose(out);
}
#endif /* MAKECRCH */
}
#ifdef MAKECRCH
local void write_table(out, table)
FILE *out;
const z_crc_t FAR *table;
{
int n;
for (n = 0; n < 256; n++)
fprintf(out, "%s0x%08lxUL%s", n % 5 ? "" : " ",
(unsigned long)(table[n]),
n == 255 ? "\n" : (n % 5 == 4 ? ",\n" : ", "));
}
#endif /* MAKECRCH */
#else /* !DYNAMIC_CRC_TABLE */
/* ========================================================================
* Tables of CRC-32s of all single-byte values, made by make_crc_table().
*/
#include "crc32.h"
#endif /* DYNAMIC_CRC_TABLE */
/* =========================================================================
* This function can be used by asm versions of crc32()
*/
const z_crc_t FAR * ZEXPORT get_crc_table()
{
#ifdef DYNAMIC_CRC_TABLE
if (crc_table_empty)
make_crc_table();
#endif /* DYNAMIC_CRC_TABLE */
return (const z_crc_t FAR *)crc_table;
}
/* ========================================================================= */
#define DO1 crc = crc_table[0][((int)crc ^ (*buf++)) & 0xff] ^ (crc >> 8)
#define DO8 DO1; DO1; DO1; DO1; DO1; DO1; DO1; DO1
/* ========================================================================= */
unsigned long ZEXPORT crc32(crc, buf, len)
unsigned long crc;
const unsigned char FAR *buf;
uInt len;
{
if (buf == Z_NULL) return 0UL;
#ifdef DYNAMIC_CRC_TABLE
if (crc_table_empty)
make_crc_table();
#endif /* DYNAMIC_CRC_TABLE */
#ifdef BYFOUR
if (sizeof(void *) == sizeof(ptrdiff_t)) {
z_crc_t endian;
endian = 1;
if (*((unsigned char *)(&endian)))
return crc32_little(crc, buf, len);
else
return crc32_big(crc, buf, len);
}
#endif /* BYFOUR */
crc = crc ^ 0xffffffffUL;
while (len >= 8) {
DO8;
len -= 8;
}
if (len) do {
DO1;
} while (--len);
return crc ^ 0xffffffffUL;
}
#ifdef BYFOUR
/* ========================================================================= */
#define DOLIT4 c ^= *buf4++; \
c = crc_table[3][c & 0xff] ^ crc_table[2][(c >> 8) & 0xff] ^ \
crc_table[1][(c >> 16) & 0xff] ^ crc_table[0][c >> 24]
#define DOLIT32 DOLIT4; DOLIT4; DOLIT4; DOLIT4; DOLIT4; DOLIT4; DOLIT4; DOLIT4
/* ========================================================================= */
local unsigned long crc32_little(crc, buf, len)
unsigned long crc;
const unsigned char FAR *buf;
unsigned len;
{
register z_crc_t c;
register const z_crc_t FAR *buf4;
c = (z_crc_t)crc;
c = ~c;
while (len && ((ptrdiff_t)buf & 3)) {
c = crc_table[0][(c ^ *buf++) & 0xff] ^ (c >> 8);
len--;
}
buf4 = (const z_crc_t FAR *)(const void FAR *)buf;
while (len >= 32) {
DOLIT32;
len -= 32;
}
while (len >= 4) {
DOLIT4;
len -= 4;
}
buf = (const unsigned char FAR *)buf4;
if (len) do {
c = crc_table[0][(c ^ *buf++) & 0xff] ^ (c >> 8);
} while (--len);
c = ~c;
return (unsigned long)c;
}
/* ========================================================================= */
#define DOBIG4 c ^= *++buf4; \
c = crc_table[4][c & 0xff] ^ crc_table[5][(c >> 8) & 0xff] ^ \
crc_table[6][(c >> 16) & 0xff] ^ crc_table[7][c >> 24]
#define DOBIG32 DOBIG4; DOBIG4; DOBIG4; DOBIG4; DOBIG4; DOBIG4; DOBIG4; DOBIG4
/* ========================================================================= */
local unsigned long crc32_big(crc, buf, len)
unsigned long crc;
const unsigned char FAR *buf;
unsigned len;
{
register z_crc_t c;
register const z_crc_t FAR *buf4;
c = ZSWAP32((z_crc_t)crc);
c = ~c;
while (len && ((ptrdiff_t)buf & 3)) {
c = crc_table[4][(c >> 24) ^ *buf++] ^ (c << 8);
len--;
}
buf4 = (const z_crc_t FAR *)(const void FAR *)buf;
buf4--;
while (len >= 32) {
DOBIG32;
len -= 32;
}
while (len >= 4) {
DOBIG4;
len -= 4;
}
buf4++;
buf = (const unsigned char FAR *)buf4;
if (len) do {
c = crc_table[4][(c >> 24) ^ *buf++] ^ (c << 8);
} while (--len);
c = ~c;
return (unsigned long)(ZSWAP32(c));
}
#endif /* BYFOUR */
#define GF2_DIM 32 /* dimension of GF(2) vectors (length of CRC) */
/* ========================================================================= */
local unsigned long gf2_matrix_times(mat, vec)
unsigned long *mat;
unsigned long vec;
{
unsigned long sum;
sum = 0;
while (vec) {
if (vec & 1)
sum ^= *mat;
vec >>= 1;
mat++;
}
return sum;
}
/* ========================================================================= */
local void gf2_matrix_square(square, mat)
unsigned long *square;
unsigned long *mat;
{
int n;
for (n = 0; n < GF2_DIM; n++)
square[n] = gf2_matrix_times(mat, mat[n]);
}
/* ========================================================================= */
local uLong crc32_combine_(crc1, crc2, len2)
uLong crc1;
uLong crc2;
z_off64_t len2;
{
int n;
unsigned long row;
unsigned long even[GF2_DIM]; /* even-power-of-two zeros operator */
unsigned long odd[GF2_DIM]; /* odd-power-of-two zeros operator */
/* degenerate case (also disallow negative lengths) */
if (len2 <= 0)
return crc1;
/* put operator for one zero bit in odd */
odd[0] = 0xedb88320UL; /* CRC-32 polynomial */
row = 1;
for (n = 1; n < GF2_DIM; n++) {
odd[n] = row;
row <<= 1;
}
/* put operator for two zero bits in even */
gf2_matrix_square(even, odd);
/* put operator for four zero bits in odd */
gf2_matrix_square(odd, even);
/* apply len2 zeros to crc1 (first square will put the operator for one
zero byte, eight zero bits, in even) */
do {
/* apply zeros operator for this bit of len2 */
gf2_matrix_square(even, odd);
if (len2 & 1)
crc1 = gf2_matrix_times(even, crc1);
len2 >>= 1;
/* if no more bits set, then done */
if (len2 == 0)
break;
/* another iteration of the loop with odd and even swapped */
gf2_matrix_square(odd, even);
if (len2 & 1)
crc1 = gf2_matrix_times(odd, crc1);
len2 >>= 1;
/* if no more bits set, then done */
} while (len2 != 0);
/* return combined crc */
crc1 ^= crc2;
return crc1;
}
/* ========================================================================= */
uLong ZEXPORT crc32_combine(crc1, crc2, len2)
uLong crc1;
uLong crc2;
z_off_t len2;
{
return crc32_combine_(crc1, crc2, len2);
}
uLong ZEXPORT crc32_combine64(crc1, crc2, len2)
uLong crc1;
uLong crc2;
z_off64_t len2;
{
return crc32_combine_(crc1, crc2, len2);
}


@@ -1,441 +0,0 @@
/* crc32.h -- tables for rapid CRC calculation
* Generated automatically by crc32.c
*/
local const z_crc_t FAR crc_table[TBLS][256] =
{
{
0x00000000UL, 0x77073096UL, 0xee0e612cUL, 0x990951baUL, 0x076dc419UL,
0x706af48fUL, 0xe963a535UL, 0x9e6495a3UL, 0x0edb8832UL, 0x79dcb8a4UL,
0xe0d5e91eUL, 0x97d2d988UL, 0x09b64c2bUL, 0x7eb17cbdUL, 0xe7b82d07UL,
0x90bf1d91UL, 0x1db71064UL, 0x6ab020f2UL, 0xf3b97148UL, 0x84be41deUL,
0x1adad47dUL, 0x6ddde4ebUL, 0xf4d4b551UL, 0x83d385c7UL, 0x136c9856UL,
0x646ba8c0UL, 0xfd62f97aUL, 0x8a65c9ecUL, 0x14015c4fUL, 0x63066cd9UL,
0xfa0f3d63UL, 0x8d080df5UL, 0x3b6e20c8UL, 0x4c69105eUL, 0xd56041e4UL,
0xa2677172UL, 0x3c03e4d1UL, 0x4b04d447UL, 0xd20d85fdUL, 0xa50ab56bUL,
0x35b5a8faUL, 0x42b2986cUL, 0xdbbbc9d6UL, 0xacbcf940UL, 0x32d86ce3UL,
0x45df5c75UL, 0xdcd60dcfUL, 0xabd13d59UL, 0x26d930acUL, 0x51de003aUL,
0xc8d75180UL, 0xbfd06116UL, 0x21b4f4b5UL, 0x56b3c423UL, 0xcfba9599UL,
0xb8bda50fUL, 0x2802b89eUL, 0x5f058808UL, 0xc60cd9b2UL, 0xb10be924UL,
0x2f6f7c87UL, 0x58684c11UL, 0xc1611dabUL, 0xb6662d3dUL, 0x76dc4190UL,
0x01db7106UL, 0x98d220bcUL, 0xefd5102aUL, 0x71b18589UL, 0x06b6b51fUL,
0x9fbfe4a5UL, 0xe8b8d433UL, 0x7807c9a2UL, 0x0f00f934UL, 0x9609a88eUL,
0xe10e9818UL, 0x7f6a0dbbUL, 0x086d3d2dUL, 0x91646c97UL, 0xe6635c01UL,
0x6b6b51f4UL, 0x1c6c6162UL, 0x856530d8UL, 0xf262004eUL, 0x6c0695edUL,
0x1b01a57bUL, 0x8208f4c1UL, 0xf50fc457UL, 0x65b0d9c6UL, 0x12b7e950UL,
0x8bbeb8eaUL, 0xfcb9887cUL, 0x62dd1ddfUL, 0x15da2d49UL, 0x8cd37cf3UL,
0xfbd44c65UL, 0x4db26158UL, 0x3ab551ceUL, 0xa3bc0074UL, 0xd4bb30e2UL,
0x4adfa541UL, 0x3dd895d7UL, 0xa4d1c46dUL, 0xd3d6f4fbUL, 0x4369e96aUL,
0x346ed9fcUL, 0xad678846UL, 0xda60b8d0UL, 0x44042d73UL, 0x33031de5UL,
0xaa0a4c5fUL, 0xdd0d7cc9UL, 0x5005713cUL, 0x270241aaUL, 0xbe0b1010UL,
0xc90c2086UL, 0x5768b525UL, 0x206f85b3UL, 0xb966d409UL, 0xce61e49fUL,
0x5edef90eUL, 0x29d9c998UL, 0xb0d09822UL, 0xc7d7a8b4UL, 0x59b33d17UL,
0x2eb40d81UL, 0xb7bd5c3bUL, 0xc0ba6cadUL, 0xedb88320UL, 0x9abfb3b6UL,
0x03b6e20cUL, 0x74b1d29aUL, 0xead54739UL, 0x9dd277afUL, 0x04db2615UL,
0x73dc1683UL, 0xe3630b12UL, 0x94643b84UL, 0x0d6d6a3eUL, 0x7a6a5aa8UL,
0xe40ecf0bUL, 0x9309ff9dUL, 0x0a00ae27UL, 0x7d079eb1UL, 0xf00f9344UL,
0x8708a3d2UL, 0x1e01f268UL, 0x6906c2feUL, 0xf762575dUL, 0x806567cbUL,
0x196c3671UL, 0x6e6b06e7UL, 0xfed41b76UL, 0x89d32be0UL, 0x10da7a5aUL,
0x67dd4accUL, 0xf9b9df6fUL, 0x8ebeeff9UL, 0x17b7be43UL, 0x60b08ed5UL,
0xd6d6a3e8UL, 0xa1d1937eUL, 0x38d8c2c4UL, 0x4fdff252UL, 0xd1bb67f1UL,
0xa6bc5767UL, 0x3fb506ddUL, 0x48b2364bUL, 0xd80d2bdaUL, 0xaf0a1b4cUL,
0x36034af6UL, 0x41047a60UL, 0xdf60efc3UL, 0xa867df55UL, 0x316e8eefUL,
0x4669be79UL, 0xcb61b38cUL, 0xbc66831aUL, 0x256fd2a0UL, 0x5268e236UL,
0xcc0c7795UL, 0xbb0b4703UL, 0x220216b9UL, 0x5505262fUL, 0xc5ba3bbeUL,
0xb2bd0b28UL, 0x2bb45a92UL, 0x5cb36a04UL, 0xc2d7ffa7UL, 0xb5d0cf31UL,
0x2cd99e8bUL, 0x5bdeae1dUL, 0x9b64c2b0UL, 0xec63f226UL, 0x756aa39cUL,
0x026d930aUL, 0x9c0906a9UL, 0xeb0e363fUL, 0x72076785UL, 0x05005713UL,
0x95bf4a82UL, 0xe2b87a14UL, 0x7bb12baeUL, 0x0cb61b38UL, 0x92d28e9bUL,
0xe5d5be0dUL, 0x7cdcefb7UL, 0x0bdbdf21UL, 0x86d3d2d4UL, 0xf1d4e242UL,
0x68ddb3f8UL, 0x1fda836eUL, 0x81be16cdUL, 0xf6b9265bUL, 0x6fb077e1UL,
0x18b74777UL, 0x88085ae6UL, 0xff0f6a70UL, 0x66063bcaUL, 0x11010b5cUL,
0x8f659effUL, 0xf862ae69UL, 0x616bffd3UL, 0x166ccf45UL, 0xa00ae278UL,
0xd70dd2eeUL, 0x4e048354UL, 0x3903b3c2UL, 0xa7672661UL, 0xd06016f7UL,
0x4969474dUL, 0x3e6e77dbUL, 0xaed16a4aUL, 0xd9d65adcUL, 0x40df0b66UL,
0x37d83bf0UL, 0xa9bcae53UL, 0xdebb9ec5UL, 0x47b2cf7fUL, 0x30b5ffe9UL,
0xbdbdf21cUL, 0xcabac28aUL, 0x53b39330UL, 0x24b4a3a6UL, 0xbad03605UL,
0xcdd70693UL, 0x54de5729UL, 0x23d967bfUL, 0xb3667a2eUL, 0xc4614ab8UL,
0x5d681b02UL, 0x2a6f2b94UL, 0xb40bbe37UL, 0xc30c8ea1UL, 0x5a05df1bUL,
0x2d02ef8dUL
#ifdef BYFOUR
},
{
0x00000000UL, 0x191b3141UL, 0x32366282UL, 0x2b2d53c3UL, 0x646cc504UL,
0x7d77f445UL, 0x565aa786UL, 0x4f4196c7UL, 0xc8d98a08UL, 0xd1c2bb49UL,
0xfaefe88aUL, 0xe3f4d9cbUL, 0xacb54f0cUL, 0xb5ae7e4dUL, 0x9e832d8eUL,
0x87981ccfUL, 0x4ac21251UL, 0x53d92310UL, 0x78f470d3UL, 0x61ef4192UL,
0x2eaed755UL, 0x37b5e614UL, 0x1c98b5d7UL, 0x05838496UL, 0x821b9859UL,
0x9b00a918UL, 0xb02dfadbUL, 0xa936cb9aUL, 0xe6775d5dUL, 0xff6c6c1cUL,
0xd4413fdfUL, 0xcd5a0e9eUL, 0x958424a2UL, 0x8c9f15e3UL, 0xa7b24620UL,
0xbea97761UL, 0xf1e8e1a6UL, 0xe8f3d0e7UL, 0xc3de8324UL, 0xdac5b265UL,
0x5d5daeaaUL, 0x44469febUL, 0x6f6bcc28UL, 0x7670fd69UL, 0x39316baeUL,
0x202a5aefUL, 0x0b07092cUL, 0x121c386dUL, 0xdf4636f3UL, 0xc65d07b2UL,
0xed705471UL, 0xf46b6530UL, 0xbb2af3f7UL, 0xa231c2b6UL, 0x891c9175UL,
0x9007a034UL, 0x179fbcfbUL, 0x0e848dbaUL, 0x25a9de79UL, 0x3cb2ef38UL,
0x73f379ffUL, 0x6ae848beUL, 0x41c51b7dUL, 0x58de2a3cUL, 0xf0794f05UL,
0xe9627e44UL, 0xc24f2d87UL, 0xdb541cc6UL, 0x94158a01UL, 0x8d0ebb40UL,
0xa623e883UL, 0xbf38d9c2UL, 0x38a0c50dUL, 0x21bbf44cUL, 0x0a96a78fUL,
0x138d96ceUL, 0x5ccc0009UL, 0x45d73148UL, 0x6efa628bUL, 0x77e153caUL,
0xbabb5d54UL, 0xa3a06c15UL, 0x888d3fd6UL, 0x91960e97UL, 0xded79850UL,
0xc7cca911UL, 0xece1fad2UL, 0xf5facb93UL, 0x7262d75cUL, 0x6b79e61dUL,
0x4054b5deUL, 0x594f849fUL, 0x160e1258UL, 0x0f152319UL, 0x243870daUL,
0x3d23419bUL, 0x65fd6ba7UL, 0x7ce65ae6UL, 0x57cb0925UL, 0x4ed03864UL,
0x0191aea3UL, 0x188a9fe2UL, 0x33a7cc21UL, 0x2abcfd60UL, 0xad24e1afUL,
0xb43fd0eeUL, 0x9f12832dUL, 0x8609b26cUL, 0xc94824abUL, 0xd05315eaUL,
0xfb7e4629UL, 0xe2657768UL, 0x2f3f79f6UL, 0x362448b7UL, 0x1d091b74UL,
0x04122a35UL, 0x4b53bcf2UL, 0x52488db3UL, 0x7965de70UL, 0x607eef31UL,
0xe7e6f3feUL, 0xfefdc2bfUL, 0xd5d0917cUL, 0xcccba03dUL, 0x838a36faUL,
0x9a9107bbUL, 0xb1bc5478UL, 0xa8a76539UL, 0x3b83984bUL, 0x2298a90aUL,
0x09b5fac9UL, 0x10aecb88UL, 0x5fef5d4fUL, 0x46f46c0eUL, 0x6dd93fcdUL,
0x74c20e8cUL, 0xf35a1243UL, 0xea412302UL, 0xc16c70c1UL, 0xd8774180UL,
0x9736d747UL, 0x8e2de606UL, 0xa500b5c5UL, 0xbc1b8484UL, 0x71418a1aUL,
0x685abb5bUL, 0x4377e898UL, 0x5a6cd9d9UL, 0x152d4f1eUL, 0x0c367e5fUL,
0x271b2d9cUL, 0x3e001cddUL, 0xb9980012UL, 0xa0833153UL, 0x8bae6290UL,
0x92b553d1UL, 0xddf4c516UL, 0xc4eff457UL, 0xefc2a794UL, 0xf6d996d5UL,
0xae07bce9UL, 0xb71c8da8UL, 0x9c31de6bUL, 0x852aef2aUL, 0xca6b79edUL,
0xd37048acUL, 0xf85d1b6fUL, 0xe1462a2eUL, 0x66de36e1UL, 0x7fc507a0UL,
0x54e85463UL, 0x4df36522UL, 0x02b2f3e5UL, 0x1ba9c2a4UL, 0x30849167UL,
0x299fa026UL, 0xe4c5aeb8UL, 0xfdde9ff9UL, 0xd6f3cc3aUL, 0xcfe8fd7bUL,
0x80a96bbcUL, 0x99b25afdUL, 0xb29f093eUL, 0xab84387fUL, 0x2c1c24b0UL,
0x350715f1UL, 0x1e2a4632UL, 0x07317773UL, 0x4870e1b4UL, 0x516bd0f5UL,
0x7a468336UL, 0x635db277UL, 0xcbfad74eUL, 0xd2e1e60fUL, 0xf9ccb5ccUL,
0xe0d7848dUL, 0xaf96124aUL, 0xb68d230bUL, 0x9da070c8UL, 0x84bb4189UL,
0x03235d46UL, 0x1a386c07UL, 0x31153fc4UL, 0x280e0e85UL, 0x674f9842UL,
0x7e54a903UL, 0x5579fac0UL, 0x4c62cb81UL, 0x8138c51fUL, 0x9823f45eUL,
0xb30ea79dUL, 0xaa1596dcUL, 0xe554001bUL, 0xfc4f315aUL, 0xd7626299UL,
0xce7953d8UL, 0x49e14f17UL, 0x50fa7e56UL, 0x7bd72d95UL, 0x62cc1cd4UL,
0x2d8d8a13UL, 0x3496bb52UL, 0x1fbbe891UL, 0x06a0d9d0UL, 0x5e7ef3ecUL,
0x4765c2adUL, 0x6c48916eUL, 0x7553a02fUL, 0x3a1236e8UL, 0x230907a9UL,
0x0824546aUL, 0x113f652bUL, 0x96a779e4UL, 0x8fbc48a5UL, 0xa4911b66UL,
0xbd8a2a27UL, 0xf2cbbce0UL, 0xebd08da1UL, 0xc0fdde62UL, 0xd9e6ef23UL,
0x14bce1bdUL, 0x0da7d0fcUL, 0x268a833fUL, 0x3f91b27eUL, 0x70d024b9UL,
0x69cb15f8UL, 0x42e6463bUL, 0x5bfd777aUL, 0xdc656bb5UL, 0xc57e5af4UL,
0xee530937UL, 0xf7483876UL, 0xb809aeb1UL, 0xa1129ff0UL, 0x8a3fcc33UL,
0x9324fd72UL
},
{
0x00000000UL, 0x01c26a37UL, 0x0384d46eUL, 0x0246be59UL, 0x0709a8dcUL,
0x06cbc2ebUL, 0x048d7cb2UL, 0x054f1685UL, 0x0e1351b8UL, 0x0fd13b8fUL,
0x0d9785d6UL, 0x0c55efe1UL, 0x091af964UL, 0x08d89353UL, 0x0a9e2d0aUL,
0x0b5c473dUL, 0x1c26a370UL, 0x1de4c947UL, 0x1fa2771eUL, 0x1e601d29UL,
0x1b2f0bacUL, 0x1aed619bUL, 0x18abdfc2UL, 0x1969b5f5UL, 0x1235f2c8UL,
0x13f798ffUL, 0x11b126a6UL, 0x10734c91UL, 0x153c5a14UL, 0x14fe3023UL,
0x16b88e7aUL, 0x177ae44dUL, 0x384d46e0UL, 0x398f2cd7UL, 0x3bc9928eUL,
0x3a0bf8b9UL, 0x3f44ee3cUL, 0x3e86840bUL, 0x3cc03a52UL, 0x3d025065UL,
0x365e1758UL, 0x379c7d6fUL, 0x35dac336UL, 0x3418a901UL, 0x3157bf84UL,
0x3095d5b3UL, 0x32d36beaUL, 0x331101ddUL, 0x246be590UL, 0x25a98fa7UL,
0x27ef31feUL, 0x262d5bc9UL, 0x23624d4cUL, 0x22a0277bUL, 0x20e69922UL,
0x2124f315UL, 0x2a78b428UL, 0x2bbade1fUL, 0x29fc6046UL, 0x283e0a71UL,
0x2d711cf4UL, 0x2cb376c3UL, 0x2ef5c89aUL, 0x2f37a2adUL, 0x709a8dc0UL,
0x7158e7f7UL, 0x731e59aeUL, 0x72dc3399UL, 0x7793251cUL, 0x76514f2bUL,
0x7417f172UL, 0x75d59b45UL, 0x7e89dc78UL, 0x7f4bb64fUL, 0x7d0d0816UL,
0x7ccf6221UL, 0x798074a4UL, 0x78421e93UL, 0x7a04a0caUL, 0x7bc6cafdUL,
0x6cbc2eb0UL, 0x6d7e4487UL, 0x6f38fadeUL, 0x6efa90e9UL, 0x6bb5866cUL,
0x6a77ec5bUL, 0x68315202UL, 0x69f33835UL, 0x62af7f08UL, 0x636d153fUL,
0x612bab66UL, 0x60e9c151UL, 0x65a6d7d4UL, 0x6464bde3UL, 0x662203baUL,
0x67e0698dUL, 0x48d7cb20UL, 0x4915a117UL, 0x4b531f4eUL, 0x4a917579UL,
0x4fde63fcUL, 0x4e1c09cbUL, 0x4c5ab792UL, 0x4d98dda5UL, 0x46c49a98UL,
0x4706f0afUL, 0x45404ef6UL, 0x448224c1UL, 0x41cd3244UL, 0x400f5873UL,
0x4249e62aUL, 0x438b8c1dUL, 0x54f16850UL, 0x55330267UL, 0x5775bc3eUL,
0x56b7d609UL, 0x53f8c08cUL, 0x523aaabbUL, 0x507c14e2UL, 0x51be7ed5UL,
0x5ae239e8UL, 0x5b2053dfUL, 0x5966ed86UL, 0x58a487b1UL, 0x5deb9134UL,
0x5c29fb03UL, 0x5e6f455aUL, 0x5fad2f6dUL, 0xe1351b80UL, 0xe0f771b7UL,
0xe2b1cfeeUL, 0xe373a5d9UL, 0xe63cb35cUL, 0xe7fed96bUL, 0xe5b86732UL,
0xe47a0d05UL, 0xef264a38UL, 0xeee4200fUL, 0xeca29e56UL, 0xed60f461UL,
0xe82fe2e4UL, 0xe9ed88d3UL, 0xebab368aUL, 0xea695cbdUL, 0xfd13b8f0UL,
0xfcd1d2c7UL, 0xfe976c9eUL, 0xff5506a9UL, 0xfa1a102cUL, 0xfbd87a1bUL,
0xf99ec442UL, 0xf85cae75UL, 0xf300e948UL, 0xf2c2837fUL, 0xf0843d26UL,
0xf1465711UL, 0xf4094194UL, 0xf5cb2ba3UL, 0xf78d95faUL, 0xf64fffcdUL,
0xd9785d60UL, 0xd8ba3757UL, 0xdafc890eUL, 0xdb3ee339UL, 0xde71f5bcUL,
0xdfb39f8bUL, 0xddf521d2UL, 0xdc374be5UL, 0xd76b0cd8UL, 0xd6a966efUL,
0xd4efd8b6UL, 0xd52db281UL, 0xd062a404UL, 0xd1a0ce33UL, 0xd3e6706aUL,
0xd2241a5dUL, 0xc55efe10UL, 0xc49c9427UL, 0xc6da2a7eUL, 0xc7184049UL,
0xc25756ccUL, 0xc3953cfbUL, 0xc1d382a2UL, 0xc011e895UL, 0xcb4dafa8UL,
0xca8fc59fUL, 0xc8c97bc6UL, 0xc90b11f1UL, 0xcc440774UL, 0xcd866d43UL,
0xcfc0d31aUL, 0xce02b92dUL, 0x91af9640UL, 0x906dfc77UL, 0x922b422eUL,
0x93e92819UL, 0x96a63e9cUL, 0x976454abUL, 0x9522eaf2UL, 0x94e080c5UL,
0x9fbcc7f8UL, 0x9e7eadcfUL, 0x9c381396UL, 0x9dfa79a1UL, 0x98b56f24UL,
0x99770513UL, 0x9b31bb4aUL, 0x9af3d17dUL, 0x8d893530UL, 0x8c4b5f07UL,
0x8e0de15eUL, 0x8fcf8b69UL, 0x8a809decUL, 0x8b42f7dbUL, 0x89044982UL,
0x88c623b5UL, 0x839a6488UL, 0x82580ebfUL, 0x801eb0e6UL, 0x81dcdad1UL,
0x8493cc54UL, 0x8551a663UL, 0x8717183aUL, 0x86d5720dUL, 0xa9e2d0a0UL,
0xa820ba97UL, 0xaa6604ceUL, 0xaba46ef9UL, 0xaeeb787cUL, 0xaf29124bUL,
0xad6fac12UL, 0xacadc625UL, 0xa7f18118UL, 0xa633eb2fUL, 0xa4755576UL,
0xa5b73f41UL, 0xa0f829c4UL, 0xa13a43f3UL, 0xa37cfdaaUL, 0xa2be979dUL,
0xb5c473d0UL, 0xb40619e7UL, 0xb640a7beUL, 0xb782cd89UL, 0xb2cddb0cUL,
0xb30fb13bUL, 0xb1490f62UL, 0xb08b6555UL, 0xbbd72268UL, 0xba15485fUL,
0xb853f606UL, 0xb9919c31UL, 0xbcde8ab4UL, 0xbd1ce083UL, 0xbf5a5edaUL,
0xbe9834edUL
},
{
0x00000000UL, 0xb8bc6765UL, 0xaa09c88bUL, 0x12b5afeeUL, 0x8f629757UL,
0x37def032UL, 0x256b5fdcUL, 0x9dd738b9UL, 0xc5b428efUL, 0x7d084f8aUL,
0x6fbde064UL, 0xd7018701UL, 0x4ad6bfb8UL, 0xf26ad8ddUL, 0xe0df7733UL,
0x58631056UL, 0x5019579fUL, 0xe8a530faUL, 0xfa109f14UL, 0x42acf871UL,
0xdf7bc0c8UL, 0x67c7a7adUL, 0x75720843UL, 0xcdce6f26UL, 0x95ad7f70UL,
0x2d111815UL, 0x3fa4b7fbUL, 0x8718d09eUL, 0x1acfe827UL, 0xa2738f42UL,
0xb0c620acUL, 0x087a47c9UL, 0xa032af3eUL, 0x188ec85bUL, 0x0a3b67b5UL,
0xb28700d0UL, 0x2f503869UL, 0x97ec5f0cUL, 0x8559f0e2UL, 0x3de59787UL,
0x658687d1UL, 0xdd3ae0b4UL, 0xcf8f4f5aUL, 0x7733283fUL, 0xeae41086UL,
0x525877e3UL, 0x40edd80dUL, 0xf851bf68UL, 0xf02bf8a1UL, 0x48979fc4UL,
0x5a22302aUL, 0xe29e574fUL, 0x7f496ff6UL, 0xc7f50893UL, 0xd540a77dUL,
0x6dfcc018UL, 0x359fd04eUL, 0x8d23b72bUL, 0x9f9618c5UL, 0x272a7fa0UL,
0xbafd4719UL, 0x0241207cUL, 0x10f48f92UL, 0xa848e8f7UL, 0x9b14583dUL,
0x23a83f58UL, 0x311d90b6UL, 0x89a1f7d3UL, 0x1476cf6aUL, 0xaccaa80fUL,
0xbe7f07e1UL, 0x06c36084UL, 0x5ea070d2UL, 0xe61c17b7UL, 0xf4a9b859UL,
0x4c15df3cUL, 0xd1c2e785UL, 0x697e80e0UL, 0x7bcb2f0eUL, 0xc377486bUL,
0xcb0d0fa2UL, 0x73b168c7UL, 0x6104c729UL, 0xd9b8a04cUL, 0x446f98f5UL,
0xfcd3ff90UL, 0xee66507eUL, 0x56da371bUL, 0x0eb9274dUL, 0xb6054028UL,
0xa4b0efc6UL, 0x1c0c88a3UL, 0x81dbb01aUL, 0x3967d77fUL, 0x2bd27891UL,
0x936e1ff4UL, 0x3b26f703UL, 0x839a9066UL, 0x912f3f88UL, 0x299358edUL,
0xb4446054UL, 0x0cf80731UL, 0x1e4da8dfUL, 0xa6f1cfbaUL, 0xfe92dfecUL,
0x462eb889UL, 0x549b1767UL, 0xec277002UL, 0x71f048bbUL, 0xc94c2fdeUL,
0xdbf98030UL, 0x6345e755UL, 0x6b3fa09cUL, 0xd383c7f9UL, 0xc1366817UL,
0x798a0f72UL, 0xe45d37cbUL, 0x5ce150aeUL, 0x4e54ff40UL, 0xf6e89825UL,
0xae8b8873UL, 0x1637ef16UL, 0x048240f8UL, 0xbc3e279dUL, 0x21e91f24UL,
0x99557841UL, 0x8be0d7afUL, 0x335cb0caUL, 0xed59b63bUL, 0x55e5d15eUL,
0x47507eb0UL, 0xffec19d5UL, 0x623b216cUL, 0xda874609UL, 0xc832e9e7UL,
0x708e8e82UL, 0x28ed9ed4UL, 0x9051f9b1UL, 0x82e4565fUL, 0x3a58313aUL,
0xa78f0983UL, 0x1f336ee6UL, 0x0d86c108UL, 0xb53aa66dUL, 0xbd40e1a4UL,
0x05fc86c1UL, 0x1749292fUL, 0xaff54e4aUL, 0x322276f3UL, 0x8a9e1196UL,
0x982bbe78UL, 0x2097d91dUL, 0x78f4c94bUL, 0xc048ae2eUL, 0xd2fd01c0UL,
0x6a4166a5UL, 0xf7965e1cUL, 0x4f2a3979UL, 0x5d9f9697UL, 0xe523f1f2UL,
0x4d6b1905UL, 0xf5d77e60UL, 0xe762d18eUL, 0x5fdeb6ebUL, 0xc2098e52UL,
0x7ab5e937UL, 0x680046d9UL, 0xd0bc21bcUL, 0x88df31eaUL, 0x3063568fUL,
0x22d6f961UL, 0x9a6a9e04UL, 0x07bda6bdUL, 0xbf01c1d8UL, 0xadb46e36UL,
0x15080953UL, 0x1d724e9aUL, 0xa5ce29ffUL, 0xb77b8611UL, 0x0fc7e174UL,
0x9210d9cdUL, 0x2aacbea8UL, 0x38191146UL, 0x80a57623UL, 0xd8c66675UL,
0x607a0110UL, 0x72cfaefeUL, 0xca73c99bUL, 0x57a4f122UL, 0xef189647UL,
0xfdad39a9UL, 0x45115eccUL, 0x764dee06UL, 0xcef18963UL, 0xdc44268dUL,
0x64f841e8UL, 0xf92f7951UL, 0x41931e34UL, 0x5326b1daUL, 0xeb9ad6bfUL,
0xb3f9c6e9UL, 0x0b45a18cUL, 0x19f00e62UL, 0xa14c6907UL, 0x3c9b51beUL,
0x842736dbUL, 0x96929935UL, 0x2e2efe50UL, 0x2654b999UL, 0x9ee8defcUL,
0x8c5d7112UL, 0x34e11677UL, 0xa9362eceUL, 0x118a49abUL, 0x033fe645UL,
0xbb838120UL, 0xe3e09176UL, 0x5b5cf613UL, 0x49e959fdUL, 0xf1553e98UL,
0x6c820621UL, 0xd43e6144UL, 0xc68bceaaUL, 0x7e37a9cfUL, 0xd67f4138UL,
0x6ec3265dUL, 0x7c7689b3UL, 0xc4caeed6UL, 0x591dd66fUL, 0xe1a1b10aUL,
0xf3141ee4UL, 0x4ba87981UL, 0x13cb69d7UL, 0xab770eb2UL, 0xb9c2a15cUL,
0x017ec639UL, 0x9ca9fe80UL, 0x241599e5UL, 0x36a0360bUL, 0x8e1c516eUL,
0x866616a7UL, 0x3eda71c2UL, 0x2c6fde2cUL, 0x94d3b949UL, 0x090481f0UL,
0xb1b8e695UL, 0xa30d497bUL, 0x1bb12e1eUL, 0x43d23e48UL, 0xfb6e592dUL,
0xe9dbf6c3UL, 0x516791a6UL, 0xccb0a91fUL, 0x740cce7aUL, 0x66b96194UL,
0xde0506f1UL
},
{
0x00000000UL, 0x96300777UL, 0x2c610eeeUL, 0xba510999UL, 0x19c46d07UL,
0x8ff46a70UL, 0x35a563e9UL, 0xa395649eUL, 0x3288db0eUL, 0xa4b8dc79UL,
0x1ee9d5e0UL, 0x88d9d297UL, 0x2b4cb609UL, 0xbd7cb17eUL, 0x072db8e7UL,
0x911dbf90UL, 0x6410b71dUL, 0xf220b06aUL, 0x4871b9f3UL, 0xde41be84UL,
0x7dd4da1aUL, 0xebe4dd6dUL, 0x51b5d4f4UL, 0xc785d383UL, 0x56986c13UL,
0xc0a86b64UL, 0x7af962fdUL, 0xecc9658aUL, 0x4f5c0114UL, 0xd96c0663UL,
0x633d0ffaUL, 0xf50d088dUL, 0xc8206e3bUL, 0x5e10694cUL, 0xe44160d5UL,
0x727167a2UL, 0xd1e4033cUL, 0x47d4044bUL, 0xfd850dd2UL, 0x6bb50aa5UL,
0xfaa8b535UL, 0x6c98b242UL, 0xd6c9bbdbUL, 0x40f9bcacUL, 0xe36cd832UL,
0x755cdf45UL, 0xcf0dd6dcUL, 0x593dd1abUL, 0xac30d926UL, 0x3a00de51UL,
0x8051d7c8UL, 0x1661d0bfUL, 0xb5f4b421UL, 0x23c4b356UL, 0x9995bacfUL,
0x0fa5bdb8UL, 0x9eb80228UL, 0x0888055fUL, 0xb2d90cc6UL, 0x24e90bb1UL,
0x877c6f2fUL, 0x114c6858UL, 0xab1d61c1UL, 0x3d2d66b6UL, 0x9041dc76UL,
0x0671db01UL, 0xbc20d298UL, 0x2a10d5efUL, 0x8985b171UL, 0x1fb5b606UL,
0xa5e4bf9fUL, 0x33d4b8e8UL, 0xa2c90778UL, 0x34f9000fUL, 0x8ea80996UL,
0x18980ee1UL, 0xbb0d6a7fUL, 0x2d3d6d08UL, 0x976c6491UL, 0x015c63e6UL,
0xf4516b6bUL, 0x62616c1cUL, 0xd8306585UL, 0x4e0062f2UL, 0xed95066cUL,
0x7ba5011bUL, 0xc1f40882UL, 0x57c40ff5UL, 0xc6d9b065UL, 0x50e9b712UL,
0xeab8be8bUL, 0x7c88b9fcUL, 0xdf1ddd62UL, 0x492dda15UL, 0xf37cd38cUL,
0x654cd4fbUL, 0x5861b24dUL, 0xce51b53aUL, 0x7400bca3UL, 0xe230bbd4UL,
0x41a5df4aUL, 0xd795d83dUL, 0x6dc4d1a4UL, 0xfbf4d6d3UL, 0x6ae96943UL,
0xfcd96e34UL, 0x468867adUL, 0xd0b860daUL, 0x732d0444UL, 0xe51d0333UL,
0x5f4c0aaaUL, 0xc97c0dddUL, 0x3c710550UL, 0xaa410227UL, 0x10100bbeUL,
0x86200cc9UL, 0x25b56857UL, 0xb3856f20UL, 0x09d466b9UL, 0x9fe461ceUL,
0x0ef9de5eUL, 0x98c9d929UL, 0x2298d0b0UL, 0xb4a8d7c7UL, 0x173db359UL,
0x810db42eUL, 0x3b5cbdb7UL, 0xad6cbac0UL, 0x2083b8edUL, 0xb6b3bf9aUL,
0x0ce2b603UL, 0x9ad2b174UL, 0x3947d5eaUL, 0xaf77d29dUL, 0x1526db04UL,
0x8316dc73UL, 0x120b63e3UL, 0x843b6494UL, 0x3e6a6d0dUL, 0xa85a6a7aUL,
0x0bcf0ee4UL, 0x9dff0993UL, 0x27ae000aUL, 0xb19e077dUL, 0x44930ff0UL,
0xd2a30887UL, 0x68f2011eUL, 0xfec20669UL, 0x5d5762f7UL, 0xcb676580UL,
0x71366c19UL, 0xe7066b6eUL, 0x761bd4feUL, 0xe02bd389UL, 0x5a7ada10UL,
0xcc4add67UL, 0x6fdfb9f9UL, 0xf9efbe8eUL, 0x43beb717UL, 0xd58eb060UL,
0xe8a3d6d6UL, 0x7e93d1a1UL, 0xc4c2d838UL, 0x52f2df4fUL, 0xf167bbd1UL,
0x6757bca6UL, 0xdd06b53fUL, 0x4b36b248UL, 0xda2b0dd8UL, 0x4c1b0aafUL,
0xf64a0336UL, 0x607a0441UL, 0xc3ef60dfUL, 0x55df67a8UL, 0xef8e6e31UL,
0x79be6946UL, 0x8cb361cbUL, 0x1a8366bcUL, 0xa0d26f25UL, 0x36e26852UL,
0x95770cccUL, 0x03470bbbUL, 0xb9160222UL, 0x2f260555UL, 0xbe3bbac5UL,
0x280bbdb2UL, 0x925ab42bUL, 0x046ab35cUL, 0xa7ffd7c2UL, 0x31cfd0b5UL,
0x8b9ed92cUL, 0x1daede5bUL, 0xb0c2649bUL, 0x26f263ecUL, 0x9ca36a75UL,
0x0a936d02UL, 0xa906099cUL, 0x3f360eebUL, 0x85670772UL, 0x13570005UL,
0x824abf95UL, 0x147ab8e2UL, 0xae2bb17bUL, 0x381bb60cUL, 0x9b8ed292UL,
0x0dbed5e5UL, 0xb7efdc7cUL, 0x21dfdb0bUL, 0xd4d2d386UL, 0x42e2d4f1UL,
0xf8b3dd68UL, 0x6e83da1fUL, 0xcd16be81UL, 0x5b26b9f6UL, 0xe177b06fUL,
0x7747b718UL, 0xe65a0888UL, 0x706a0fffUL, 0xca3b0666UL, 0x5c0b0111UL,
0xff9e658fUL, 0x69ae62f8UL, 0xd3ff6b61UL, 0x45cf6c16UL, 0x78e20aa0UL,
0xeed20dd7UL, 0x5483044eUL, 0xc2b30339UL, 0x612667a7UL, 0xf71660d0UL,
0x4d476949UL, 0xdb776e3eUL, 0x4a6ad1aeUL, 0xdc5ad6d9UL, 0x660bdf40UL,
0xf03bd837UL, 0x53aebca9UL, 0xc59ebbdeUL, 0x7fcfb247UL, 0xe9ffb530UL,
0x1cf2bdbdUL, 0x8ac2bacaUL, 0x3093b353UL, 0xa6a3b424UL, 0x0536d0baUL,
0x9306d7cdUL, 0x2957de54UL, 0xbf67d923UL, 0x2e7a66b3UL, 0xb84a61c4UL,
0x021b685dUL, 0x942b6f2aUL, 0x37be0bb4UL, 0xa18e0cc3UL, 0x1bdf055aUL,
0x8def022dUL
},
{
0x00000000UL, 0x41311b19UL, 0x82623632UL, 0xc3532d2bUL, 0x04c56c64UL,
0x45f4777dUL, 0x86a75a56UL, 0xc796414fUL, 0x088ad9c8UL, 0x49bbc2d1UL,
0x8ae8effaUL, 0xcbd9f4e3UL, 0x0c4fb5acUL, 0x4d7eaeb5UL, 0x8e2d839eUL,
0xcf1c9887UL, 0x5112c24aUL, 0x1023d953UL, 0xd370f478UL, 0x9241ef61UL,
0x55d7ae2eUL, 0x14e6b537UL, 0xd7b5981cUL, 0x96848305UL, 0x59981b82UL,
0x18a9009bUL, 0xdbfa2db0UL, 0x9acb36a9UL, 0x5d5d77e6UL, 0x1c6c6cffUL,
0xdf3f41d4UL, 0x9e0e5acdUL, 0xa2248495UL, 0xe3159f8cUL, 0x2046b2a7UL,
0x6177a9beUL, 0xa6e1e8f1UL, 0xe7d0f3e8UL, 0x2483dec3UL, 0x65b2c5daUL,
0xaaae5d5dUL, 0xeb9f4644UL, 0x28cc6b6fUL, 0x69fd7076UL, 0xae6b3139UL,
0xef5a2a20UL, 0x2c09070bUL, 0x6d381c12UL, 0xf33646dfUL, 0xb2075dc6UL,
0x715470edUL, 0x30656bf4UL, 0xf7f32abbUL, 0xb6c231a2UL, 0x75911c89UL,
0x34a00790UL, 0xfbbc9f17UL, 0xba8d840eUL, 0x79dea925UL, 0x38efb23cUL,
0xff79f373UL, 0xbe48e86aUL, 0x7d1bc541UL, 0x3c2ade58UL, 0x054f79f0UL,
0x447e62e9UL, 0x872d4fc2UL, 0xc61c54dbUL, 0x018a1594UL, 0x40bb0e8dUL,
0x83e823a6UL, 0xc2d938bfUL, 0x0dc5a038UL, 0x4cf4bb21UL, 0x8fa7960aUL,
0xce968d13UL, 0x0900cc5cUL, 0x4831d745UL, 0x8b62fa6eUL, 0xca53e177UL,
0x545dbbbaUL, 0x156ca0a3UL, 0xd63f8d88UL, 0x970e9691UL, 0x5098d7deUL,
0x11a9ccc7UL, 0xd2fae1ecUL, 0x93cbfaf5UL, 0x5cd76272UL, 0x1de6796bUL,
0xdeb55440UL, 0x9f844f59UL, 0x58120e16UL, 0x1923150fUL, 0xda703824UL,
0x9b41233dUL, 0xa76bfd65UL, 0xe65ae67cUL, 0x2509cb57UL, 0x6438d04eUL,
0xa3ae9101UL, 0xe29f8a18UL, 0x21cca733UL, 0x60fdbc2aUL, 0xafe124adUL,
0xeed03fb4UL, 0x2d83129fUL, 0x6cb20986UL, 0xab2448c9UL, 0xea1553d0UL,
0x29467efbUL, 0x687765e2UL, 0xf6793f2fUL, 0xb7482436UL, 0x741b091dUL,
0x352a1204UL, 0xf2bc534bUL, 0xb38d4852UL, 0x70de6579UL, 0x31ef7e60UL,
0xfef3e6e7UL, 0xbfc2fdfeUL, 0x7c91d0d5UL, 0x3da0cbccUL, 0xfa368a83UL,
0xbb07919aUL, 0x7854bcb1UL, 0x3965a7a8UL, 0x4b98833bUL, 0x0aa99822UL,
0xc9fab509UL, 0x88cbae10UL, 0x4f5def5fUL, 0x0e6cf446UL, 0xcd3fd96dUL,
0x8c0ec274UL, 0x43125af3UL, 0x022341eaUL, 0xc1706cc1UL, 0x804177d8UL,
0x47d73697UL, 0x06e62d8eUL, 0xc5b500a5UL, 0x84841bbcUL, 0x1a8a4171UL,
0x5bbb5a68UL, 0x98e87743UL, 0xd9d96c5aUL, 0x1e4f2d15UL, 0x5f7e360cUL,
0x9c2d1b27UL, 0xdd1c003eUL, 0x120098b9UL, 0x533183a0UL, 0x9062ae8bUL,
0xd153b592UL, 0x16c5f4ddUL, 0x57f4efc4UL, 0x94a7c2efUL, 0xd596d9f6UL,
0xe9bc07aeUL, 0xa88d1cb7UL, 0x6bde319cUL, 0x2aef2a85UL, 0xed796bcaUL,
0xac4870d3UL, 0x6f1b5df8UL, 0x2e2a46e1UL, 0xe136de66UL, 0xa007c57fUL,
0x6354e854UL, 0x2265f34dUL, 0xe5f3b202UL, 0xa4c2a91bUL, 0x67918430UL,
0x26a09f29UL, 0xb8aec5e4UL, 0xf99fdefdUL, 0x3accf3d6UL, 0x7bfde8cfUL,
0xbc6ba980UL, 0xfd5ab299UL, 0x3e099fb2UL, 0x7f3884abUL, 0xb0241c2cUL,
0xf1150735UL, 0x32462a1eUL, 0x73773107UL, 0xb4e17048UL, 0xf5d06b51UL,
0x3683467aUL, 0x77b25d63UL, 0x4ed7facbUL, 0x0fe6e1d2UL, 0xccb5ccf9UL,
0x8d84d7e0UL, 0x4a1296afUL, 0x0b238db6UL, 0xc870a09dUL, 0x8941bb84UL,
0x465d2303UL, 0x076c381aUL, 0xc43f1531UL, 0x850e0e28UL, 0x42984f67UL,
0x03a9547eUL, 0xc0fa7955UL, 0x81cb624cUL, 0x1fc53881UL, 0x5ef42398UL,
0x9da70eb3UL, 0xdc9615aaUL, 0x1b0054e5UL, 0x5a314ffcUL, 0x996262d7UL,
0xd85379ceUL, 0x174fe149UL, 0x567efa50UL, 0x952dd77bUL, 0xd41ccc62UL,
0x138a8d2dUL, 0x52bb9634UL, 0x91e8bb1fUL, 0xd0d9a006UL, 0xecf37e5eUL,
0xadc26547UL, 0x6e91486cUL, 0x2fa05375UL, 0xe836123aUL, 0xa9070923UL,
0x6a542408UL, 0x2b653f11UL, 0xe479a796UL, 0xa548bc8fUL, 0x661b91a4UL,
0x272a8abdUL, 0xe0bccbf2UL, 0xa18dd0ebUL, 0x62defdc0UL, 0x23efe6d9UL,
0xbde1bc14UL, 0xfcd0a70dUL, 0x3f838a26UL, 0x7eb2913fUL, 0xb924d070UL,
0xf815cb69UL, 0x3b46e642UL, 0x7a77fd5bUL, 0xb56b65dcUL, 0xf45a7ec5UL,
0x370953eeUL, 0x763848f7UL, 0xb1ae09b8UL, 0xf09f12a1UL, 0x33cc3f8aUL,
0x72fd2493UL
},
{
0x00000000UL, 0x376ac201UL, 0x6ed48403UL, 0x59be4602UL, 0xdca80907UL,
0xebc2cb06UL, 0xb27c8d04UL, 0x85164f05UL, 0xb851130eUL, 0x8f3bd10fUL,
0xd685970dUL, 0xe1ef550cUL, 0x64f91a09UL, 0x5393d808UL, 0x0a2d9e0aUL,
0x3d475c0bUL, 0x70a3261cUL, 0x47c9e41dUL, 0x1e77a21fUL, 0x291d601eUL,
0xac0b2f1bUL, 0x9b61ed1aUL, 0xc2dfab18UL, 0xf5b56919UL, 0xc8f23512UL,
0xff98f713UL, 0xa626b111UL, 0x914c7310UL, 0x145a3c15UL, 0x2330fe14UL,
0x7a8eb816UL, 0x4de47a17UL, 0xe0464d38UL, 0xd72c8f39UL, 0x8e92c93bUL,
0xb9f80b3aUL, 0x3cee443fUL, 0x0b84863eUL, 0x523ac03cUL, 0x6550023dUL,
0x58175e36UL, 0x6f7d9c37UL, 0x36c3da35UL, 0x01a91834UL, 0x84bf5731UL,
0xb3d59530UL, 0xea6bd332UL, 0xdd011133UL, 0x90e56b24UL, 0xa78fa925UL,
0xfe31ef27UL, 0xc95b2d26UL, 0x4c4d6223UL, 0x7b27a022UL, 0x2299e620UL,
0x15f32421UL, 0x28b4782aUL, 0x1fdeba2bUL, 0x4660fc29UL, 0x710a3e28UL,
0xf41c712dUL, 0xc376b32cUL, 0x9ac8f52eUL, 0xada2372fUL, 0xc08d9a70UL,
0xf7e75871UL, 0xae591e73UL, 0x9933dc72UL, 0x1c259377UL, 0x2b4f5176UL,
0x72f11774UL, 0x459bd575UL, 0x78dc897eUL, 0x4fb64b7fUL, 0x16080d7dUL,
0x2162cf7cUL, 0xa4748079UL, 0x931e4278UL, 0xcaa0047aUL, 0xfdcac67bUL,
0xb02ebc6cUL, 0x87447e6dUL, 0xdefa386fUL, 0xe990fa6eUL, 0x6c86b56bUL,
0x5bec776aUL, 0x02523168UL, 0x3538f369UL, 0x087faf62UL, 0x3f156d63UL,
0x66ab2b61UL, 0x51c1e960UL, 0xd4d7a665UL, 0xe3bd6464UL, 0xba032266UL,
0x8d69e067UL, 0x20cbd748UL, 0x17a11549UL, 0x4e1f534bUL, 0x7975914aUL,
0xfc63de4fUL, 0xcb091c4eUL, 0x92b75a4cUL, 0xa5dd984dUL, 0x989ac446UL,
0xaff00647UL, 0xf64e4045UL, 0xc1248244UL, 0x4432cd41UL, 0x73580f40UL,
0x2ae64942UL, 0x1d8c8b43UL, 0x5068f154UL, 0x67023355UL, 0x3ebc7557UL,
0x09d6b756UL, 0x8cc0f853UL, 0xbbaa3a52UL, 0xe2147c50UL, 0xd57ebe51UL,
0xe839e25aUL, 0xdf53205bUL, 0x86ed6659UL, 0xb187a458UL, 0x3491eb5dUL,
0x03fb295cUL, 0x5a456f5eUL, 0x6d2fad5fUL, 0x801b35e1UL, 0xb771f7e0UL,
0xeecfb1e2UL, 0xd9a573e3UL, 0x5cb33ce6UL, 0x6bd9fee7UL, 0x3267b8e5UL,
0x050d7ae4UL, 0x384a26efUL, 0x0f20e4eeUL, 0x569ea2ecUL, 0x61f460edUL,
0xe4e22fe8UL, 0xd388ede9UL, 0x8a36abebUL, 0xbd5c69eaUL, 0xf0b813fdUL,
0xc7d2d1fcUL, 0x9e6c97feUL, 0xa90655ffUL, 0x2c101afaUL, 0x1b7ad8fbUL,
0x42c49ef9UL, 0x75ae5cf8UL, 0x48e900f3UL, 0x7f83c2f2UL, 0x263d84f0UL,
0x115746f1UL, 0x944109f4UL, 0xa32bcbf5UL, 0xfa958df7UL, 0xcdff4ff6UL,
0x605d78d9UL, 0x5737bad8UL, 0x0e89fcdaUL, 0x39e33edbUL, 0xbcf571deUL,
0x8b9fb3dfUL, 0xd221f5ddUL, 0xe54b37dcUL, 0xd80c6bd7UL, 0xef66a9d6UL,
0xb6d8efd4UL, 0x81b22dd5UL, 0x04a462d0UL, 0x33cea0d1UL, 0x6a70e6d3UL,
0x5d1a24d2UL, 0x10fe5ec5UL, 0x27949cc4UL, 0x7e2adac6UL, 0x494018c7UL,
0xcc5657c2UL, 0xfb3c95c3UL, 0xa282d3c1UL, 0x95e811c0UL, 0xa8af4dcbUL,
0x9fc58fcaUL, 0xc67bc9c8UL, 0xf1110bc9UL, 0x740744ccUL, 0x436d86cdUL,
0x1ad3c0cfUL, 0x2db902ceUL, 0x4096af91UL, 0x77fc6d90UL, 0x2e422b92UL,
0x1928e993UL, 0x9c3ea696UL, 0xab546497UL, 0xf2ea2295UL, 0xc580e094UL,
0xf8c7bc9fUL, 0xcfad7e9eUL, 0x9613389cUL, 0xa179fa9dUL, 0x246fb598UL,
0x13057799UL, 0x4abb319bUL, 0x7dd1f39aUL, 0x3035898dUL, 0x075f4b8cUL,
0x5ee10d8eUL, 0x698bcf8fUL, 0xec9d808aUL, 0xdbf7428bUL, 0x82490489UL,
0xb523c688UL, 0x88649a83UL, 0xbf0e5882UL, 0xe6b01e80UL, 0xd1dadc81UL,
0x54cc9384UL, 0x63a65185UL, 0x3a181787UL, 0x0d72d586UL, 0xa0d0e2a9UL,
0x97ba20a8UL, 0xce0466aaUL, 0xf96ea4abUL, 0x7c78ebaeUL, 0x4b1229afUL,
0x12ac6fadUL, 0x25c6adacUL, 0x1881f1a7UL, 0x2feb33a6UL, 0x765575a4UL,
0x413fb7a5UL, 0xc429f8a0UL, 0xf3433aa1UL, 0xaafd7ca3UL, 0x9d97bea2UL,
0xd073c4b5UL, 0xe71906b4UL, 0xbea740b6UL, 0x89cd82b7UL, 0x0cdbcdb2UL,
0x3bb10fb3UL, 0x620f49b1UL, 0x55658bb0UL, 0x6822d7bbUL, 0x5f4815baUL,
0x06f653b8UL, 0x319c91b9UL, 0xb48adebcUL, 0x83e01cbdUL, 0xda5e5abfUL,
0xed3498beUL
},
{
0x00000000UL, 0x6567bcb8UL, 0x8bc809aaUL, 0xeeafb512UL, 0x5797628fUL,
0x32f0de37UL, 0xdc5f6b25UL, 0xb938d79dUL, 0xef28b4c5UL, 0x8a4f087dUL,
0x64e0bd6fUL, 0x018701d7UL, 0xb8bfd64aUL, 0xddd86af2UL, 0x3377dfe0UL,
0x56106358UL, 0x9f571950UL, 0xfa30a5e8UL, 0x149f10faUL, 0x71f8ac42UL,
0xc8c07bdfUL, 0xada7c767UL, 0x43087275UL, 0x266fcecdUL, 0x707fad95UL,
0x1518112dUL, 0xfbb7a43fUL, 0x9ed01887UL, 0x27e8cf1aUL, 0x428f73a2UL,
0xac20c6b0UL, 0xc9477a08UL, 0x3eaf32a0UL, 0x5bc88e18UL, 0xb5673b0aUL,
0xd00087b2UL, 0x6938502fUL, 0x0c5fec97UL, 0xe2f05985UL, 0x8797e53dUL,
0xd1878665UL, 0xb4e03addUL, 0x5a4f8fcfUL, 0x3f283377UL, 0x8610e4eaUL,
0xe3775852UL, 0x0dd8ed40UL, 0x68bf51f8UL, 0xa1f82bf0UL, 0xc49f9748UL,
0x2a30225aUL, 0x4f579ee2UL, 0xf66f497fUL, 0x9308f5c7UL, 0x7da740d5UL,
0x18c0fc6dUL, 0x4ed09f35UL, 0x2bb7238dUL, 0xc518969fUL, 0xa07f2a27UL,
0x1947fdbaUL, 0x7c204102UL, 0x928ff410UL, 0xf7e848a8UL, 0x3d58149bUL,
0x583fa823UL, 0xb6901d31UL, 0xd3f7a189UL, 0x6acf7614UL, 0x0fa8caacUL,
0xe1077fbeUL, 0x8460c306UL, 0xd270a05eUL, 0xb7171ce6UL, 0x59b8a9f4UL,
0x3cdf154cUL, 0x85e7c2d1UL, 0xe0807e69UL, 0x0e2fcb7bUL, 0x6b4877c3UL,
0xa20f0dcbUL, 0xc768b173UL, 0x29c70461UL, 0x4ca0b8d9UL, 0xf5986f44UL,
0x90ffd3fcUL, 0x7e5066eeUL, 0x1b37da56UL, 0x4d27b90eUL, 0x284005b6UL,
0xc6efb0a4UL, 0xa3880c1cUL, 0x1ab0db81UL, 0x7fd76739UL, 0x9178d22bUL,
0xf41f6e93UL, 0x03f7263bUL, 0x66909a83UL, 0x883f2f91UL, 0xed589329UL,
0x546044b4UL, 0x3107f80cUL, 0xdfa84d1eUL, 0xbacff1a6UL, 0xecdf92feUL,
0x89b82e46UL, 0x67179b54UL, 0x027027ecUL, 0xbb48f071UL, 0xde2f4cc9UL,
0x3080f9dbUL, 0x55e74563UL, 0x9ca03f6bUL, 0xf9c783d3UL, 0x176836c1UL,
0x720f8a79UL, 0xcb375de4UL, 0xae50e15cUL, 0x40ff544eUL, 0x2598e8f6UL,
0x73888baeUL, 0x16ef3716UL, 0xf8408204UL, 0x9d273ebcUL, 0x241fe921UL,
0x41785599UL, 0xafd7e08bUL, 0xcab05c33UL, 0x3bb659edUL, 0x5ed1e555UL,
0xb07e5047UL, 0xd519ecffUL, 0x6c213b62UL, 0x094687daUL, 0xe7e932c8UL,
0x828e8e70UL, 0xd49eed28UL, 0xb1f95190UL, 0x5f56e482UL, 0x3a31583aUL,
0x83098fa7UL, 0xe66e331fUL, 0x08c1860dUL, 0x6da63ab5UL, 0xa4e140bdUL,
0xc186fc05UL, 0x2f294917UL, 0x4a4ef5afUL, 0xf3762232UL, 0x96119e8aUL,
0x78be2b98UL, 0x1dd99720UL, 0x4bc9f478UL, 0x2eae48c0UL, 0xc001fdd2UL,
0xa566416aUL, 0x1c5e96f7UL, 0x79392a4fUL, 0x97969f5dUL, 0xf2f123e5UL,
0x05196b4dUL, 0x607ed7f5UL, 0x8ed162e7UL, 0xebb6de5fUL, 0x528e09c2UL,
0x37e9b57aUL, 0xd9460068UL, 0xbc21bcd0UL, 0xea31df88UL, 0x8f566330UL,
0x61f9d622UL, 0x049e6a9aUL, 0xbda6bd07UL, 0xd8c101bfUL, 0x366eb4adUL,
0x53090815UL, 0x9a4e721dUL, 0xff29cea5UL, 0x11867bb7UL, 0x74e1c70fUL,
0xcdd91092UL, 0xa8beac2aUL, 0x46111938UL, 0x2376a580UL, 0x7566c6d8UL,
0x10017a60UL, 0xfeaecf72UL, 0x9bc973caUL, 0x22f1a457UL, 0x479618efUL,
0xa939adfdUL, 0xcc5e1145UL, 0x06ee4d76UL, 0x6389f1ceUL, 0x8d2644dcUL,
0xe841f864UL, 0x51792ff9UL, 0x341e9341UL, 0xdab12653UL, 0xbfd69aebUL,
0xe9c6f9b3UL, 0x8ca1450bUL, 0x620ef019UL, 0x07694ca1UL, 0xbe519b3cUL,
0xdb362784UL, 0x35999296UL, 0x50fe2e2eUL, 0x99b95426UL, 0xfcdee89eUL,
0x12715d8cUL, 0x7716e134UL, 0xce2e36a9UL, 0xab498a11UL, 0x45e63f03UL,
0x208183bbUL, 0x7691e0e3UL, 0x13f65c5bUL, 0xfd59e949UL, 0x983e55f1UL,
0x2106826cUL, 0x44613ed4UL, 0xaace8bc6UL, 0xcfa9377eUL, 0x38417fd6UL,
0x5d26c36eUL, 0xb389767cUL, 0xd6eecac4UL, 0x6fd61d59UL, 0x0ab1a1e1UL,
0xe41e14f3UL, 0x8179a84bUL, 0xd769cb13UL, 0xb20e77abUL, 0x5ca1c2b9UL,
0x39c67e01UL, 0x80fea99cUL, 0xe5991524UL, 0x0b36a036UL, 0x6e511c8eUL,
0xa7166686UL, 0xc271da3eUL, 0x2cde6f2cUL, 0x49b9d394UL, 0xf0810409UL,
0x95e6b8b1UL, 0x7b490da3UL, 0x1e2eb11bUL, 0x483ed243UL, 0x2d596efbUL,
0xc3f6dbe9UL, 0xa6916751UL, 0x1fa9b0ccUL, 0x7ace0c74UL, 0x9461b966UL,
0xf10605deUL
#endif
}
};
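
The first 256-entry table above is the one consumed by the classic byte-at-a-time update crc = crc_table[0][(crc ^ byte) & 0xff] ^ (crc >> 8); the remaining seven tables exist only for the BYFOUR word-at-a-time path. As a rough, self-contained illustration -- not zlib's own code -- the sketch below regenerates an equivalent table from the reflected polynomial 0xedb88320 and checks it against the standard test vector:

#include <assert.h>
#include <stddef.h>
#include <stdint.h>

static uint32_t table[256];

static void make_table(void)
{
    for (uint32_t n = 0; n < 256; n++) {
        uint32_t c = n;
        for (int k = 0; k < 8; k++)            /* one bit at a time */
            c = (c & 1) ? 0xedb88320UL ^ (c >> 1) : c >> 1;
        table[n] = c;                          /* equals crc_table[0][n] above */
    }
}

static uint32_t crc32_update(uint32_t crc, const unsigned char *buf, size_t len)
{
    crc ^= 0xffffffffUL;                       /* pre-condition the register */
    while (len--)
        crc = table[(crc ^ *buf++) & 0xff] ^ (crc >> 8);
    return crc ^ 0xffffffffUL;                 /* post-condition */
}

int main(void)
{
    make_table();
    /* well-known check value: CRC-32 of "123456789" */
    assert(crc32_update(0, (const unsigned char *)"123456789", 9) == 0xcbf43926UL);
    return 0;
}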

File diff suppressed because it is too large


@@ -1,346 +0,0 @@
/* deflate.h -- internal compression state
* Copyright (C) 1995-2012 Jean-loup Gailly
* For conditions of distribution and use, see copyright notice in zlib.h
*/
/* WARNING: this file should *not* be used by applications. It is
part of the implementation of the compression library and is
subject to change. Applications should only use zlib.h.
*/
/* @(#) $Id$ */
#ifndef DEFLATE_H
#define DEFLATE_H
#include "zutil.h"
/* define NO_GZIP when compiling if you want to disable gzip header and
trailer creation by deflate(). NO_GZIP would be used to avoid linking in
the crc code when it is not needed. For shared libraries, gzip encoding
should be left enabled. */
#ifndef NO_GZIP
# define GZIP
#endif
/* ===========================================================================
* Internal compression state.
*/
#define LENGTH_CODES 29
/* number of length codes, not counting the special END_BLOCK code */
#define LITERALS 256
/* number of literal bytes 0..255 */
#define L_CODES (LITERALS+1+LENGTH_CODES)
/* number of Literal or Length codes, including the END_BLOCK code */
#define D_CODES 30
/* number of distance codes */
#define BL_CODES 19
/* number of codes used to transfer the bit lengths */
#define HEAP_SIZE (2*L_CODES+1)
/* maximum heap size */
#define MAX_BITS 15
/* All codes must not exceed MAX_BITS bits */
#define Buf_size 16
/* size of bit buffer in bi_buf */
#define INIT_STATE 42
#define EXTRA_STATE 69
#define NAME_STATE 73
#define COMMENT_STATE 91
#define HCRC_STATE 103
#define BUSY_STATE 113
#define FINISH_STATE 666
/* Stream status */
/* Data structure describing a single value and its code string. */
typedef struct ct_data_s {
union {
ush freq; /* frequency count */
ush code; /* bit string */
} fc;
union {
ush dad; /* father node in Huffman tree */
ush len; /* length of bit string */
} dl;
} FAR ct_data;
#define Freq fc.freq
#define Code fc.code
#define Dad dl.dad
#define Len dl.len
typedef struct static_tree_desc_s static_tree_desc;
typedef struct tree_desc_s {
ct_data *dyn_tree; /* the dynamic tree */
int max_code; /* largest code with non zero frequency */
static_tree_desc *stat_desc; /* the corresponding static tree */
} FAR tree_desc;
typedef ush Pos;
typedef Pos FAR Posf;
typedef unsigned IPos;
/* A Pos is an index in the character window. We use short instead of int to
* save space in the various tables. IPos is used only for parameter passing.
*/
typedef struct internal_state {
z_streamp strm; /* pointer back to this zlib stream */
int status; /* as the name implies */
Bytef *pending_buf; /* output still pending */
ulg pending_buf_size; /* size of pending_buf */
Bytef *pending_out; /* next pending byte to output to the stream */
uInt pending; /* nb of bytes in the pending buffer */
int wrap; /* bit 0 true for zlib, bit 1 true for gzip */
gz_headerp gzhead; /* gzip header information to write */
uInt gzindex; /* where in extra, name, or comment */
Byte method; /* can only be DEFLATED */
int last_flush; /* value of flush param for previous deflate call */
/* used by deflate.c: */
uInt w_size; /* LZ77 window size (32K by default) */
uInt w_bits; /* log2(w_size) (8..16) */
uInt w_mask; /* w_size - 1 */
Bytef *window;
/* Sliding window. Input bytes are read into the second half of the window,
* and move to the first half later to keep a dictionary of at least wSize
* bytes. With this organization, matches are limited to a distance of
* wSize-MAX_MATCH bytes, but this ensures that IO is always
* performed with a length multiple of the block size. Also, it limits
* the window size to 64K, which is quite useful on MSDOS.
* To do: use the user input buffer as sliding window.
*/
ulg window_size;
/* Actual size of window: 2*wSize, except when the user input buffer
* is directly used as sliding window.
*/
Posf *prev;
/* Link to older string with same hash index. To limit the size of this
* array to 64K, this link is maintained only for the last 32K strings.
* An index in this array is thus a window index modulo 32K.
*/
Posf *head; /* Heads of the hash chains or NIL. */
uInt ins_h; /* hash index of string to be inserted */
uInt hash_size; /* number of elements in hash table */
uInt hash_bits; /* log2(hash_size) */
uInt hash_mask; /* hash_size-1 */
uInt hash_shift;
/* Number of bits by which ins_h must be shifted at each input
* step. It must be such that after MIN_MATCH steps, the oldest
* byte no longer takes part in the hash key, that is:
* hash_shift * MIN_MATCH >= hash_bits
*/
long block_start;
/* Window position at the beginning of the current output block. Gets
* negative when the window is moved backwards.
*/
uInt match_length; /* length of best match */
IPos prev_match; /* previous match */
int match_available; /* set if previous match exists */
uInt strstart; /* start of string to insert */
uInt match_start; /* start of matching string */
uInt lookahead; /* number of valid bytes ahead in window */
uInt prev_length;
/* Length of the best match at previous step. Matches not greater than this
* are discarded. This is used in the lazy match evaluation.
*/
uInt max_chain_length;
/* To speed up deflation, hash chains are never searched beyond this
* length. A higher limit improves compression ratio but degrades the
* speed.
*/
uInt max_lazy_match;
/* Attempt to find a better match only when the current match is strictly
* smaller than this value. This mechanism is used only for compression
* levels >= 4.
*/
# define max_insert_length max_lazy_match
/* Insert new strings in the hash table only if the match length is not
* greater than this length. This saves time but degrades compression.
* max_insert_length is used only for compression levels <= 3.
*/
int level; /* compression level (1..9) */
int strategy; /* favor or force Huffman coding*/
uInt good_match;
/* Use a faster search when the previous match is longer than this */
int nice_match; /* Stop searching when current match exceeds this */
/* used by trees.c: */
/* Didn't use ct_data typedef below to suppress compiler warning */
struct ct_data_s dyn_ltree[HEAP_SIZE]; /* literal and length tree */
struct ct_data_s dyn_dtree[2*D_CODES+1]; /* distance tree */
struct ct_data_s bl_tree[2*BL_CODES+1]; /* Huffman tree for bit lengths */
struct tree_desc_s l_desc; /* desc. for literal tree */
struct tree_desc_s d_desc; /* desc. for distance tree */
struct tree_desc_s bl_desc; /* desc. for bit length tree */
ush bl_count[MAX_BITS+1];
/* number of codes at each bit length for an optimal tree */
int heap[2*L_CODES+1]; /* heap used to build the Huffman trees */
int heap_len; /* number of elements in the heap */
int heap_max; /* element of largest frequency */
/* The sons of heap[n] are heap[2*n] and heap[2*n+1]. heap[0] is not used.
* The same heap array is used to build all trees.
*/
uch depth[2*L_CODES+1];
/* Depth of each subtree used as tie breaker for trees of equal frequency
*/
uchf *l_buf; /* buffer for literals or lengths */
uInt lit_bufsize;
/* Size of match buffer for literals/lengths. There are 4 reasons for
* limiting lit_bufsize to 64K:
* - frequencies can be kept in 16 bit counters
* - if compression is not successful for the first block, all input
* data is still in the window so we can still emit a stored block even
* when input comes from standard input. (This can also be done for
* all blocks if lit_bufsize is not greater than 32K.)
* - if compression is not successful for a file smaller than 64K, we can
* even emit a stored file instead of a stored block (saving 5 bytes).
* This is applicable only for zip (not gzip or zlib).
* - creating new Huffman trees less frequently may not provide fast
* adaptation to changes in the input data statistics. (Take for
* example a binary file with poorly compressible code followed by
* a highly compressible string table.) Smaller buffer sizes give
* fast adaptation but have of course the overhead of transmitting
* trees more frequently.
* - I can't count above 4
*/
uInt last_lit; /* running index in l_buf */
ushf *d_buf;
/* Buffer for distances. To simplify the code, d_buf and l_buf have
* the same number of elements. To use different lengths, an extra flag
* array would be necessary.
*/
ulg opt_len; /* bit length of current block with optimal trees */
ulg static_len; /* bit length of current block with static trees */
uInt matches; /* number of string matches in current block */
uInt insert; /* bytes at end of window left to insert */
#ifdef DEBUG
ulg compressed_len; /* total bit length of compressed file mod 2^32 */
ulg bits_sent; /* bit length of compressed data sent mod 2^32 */
#endif
ush bi_buf;
/* Output buffer. bits are inserted starting at the bottom (least
* significant bits).
*/
int bi_valid;
/* Number of valid bits in bi_buf. All bits above the last valid bit
* are always zero.
*/
ulg high_water;
/* High water mark offset in window for initialized bytes -- bytes above
* this are set to zero in order to avoid memory check warnings when
* longest match routines access bytes past the input. This is then
* updated to the new high water mark.
*/
} FAR deflate_state;
/* Output a byte on the stream.
* IN assertion: there is enough room in pending_buf.
*/
#define put_byte(s, c) {s->pending_buf[s->pending++] = (c);}
#define MIN_LOOKAHEAD (MAX_MATCH+MIN_MATCH+1)
/* Minimum amount of lookahead, except at the end of the input file.
* See deflate.c for comments about the MIN_MATCH+1.
*/
#define MAX_DIST(s) ((s)->w_size-MIN_LOOKAHEAD)
/* In order to simplify the code, particularly on 16 bit machines, match
* distances are limited to MAX_DIST instead of WSIZE.
*/
#define WIN_INIT MAX_MATCH
/* Number of bytes after end of data in window to initialize in order to avoid
memory checker errors from longest match routines */
/* in trees.c */
void ZLIB_INTERNAL _tr_init OF((deflate_state *s));
int ZLIB_INTERNAL _tr_tally OF((deflate_state *s, unsigned dist, unsigned lc));
void ZLIB_INTERNAL _tr_flush_block OF((deflate_state *s, charf *buf,
ulg stored_len, int last));
void ZLIB_INTERNAL _tr_flush_bits OF((deflate_state *s));
void ZLIB_INTERNAL _tr_align OF((deflate_state *s));
void ZLIB_INTERNAL _tr_stored_block OF((deflate_state *s, charf *buf,
ulg stored_len, int last));
#define d_code(dist) \
((dist) < 256 ? _dist_code[dist] : _dist_code[256+((dist)>>7)])
/* Mapping from a distance to a distance code. dist is the distance - 1 and
* must not have side effects. _dist_code[256] and _dist_code[257] are never
* used.
*/
#ifndef DEBUG
/* Inline versions of _tr_tally for speed: */
#if defined(GEN_TREES_H) || !defined(STDC)
extern uch ZLIB_INTERNAL _length_code[];
extern uch ZLIB_INTERNAL _dist_code[];
#else
extern const uch ZLIB_INTERNAL _length_code[];
extern const uch ZLIB_INTERNAL _dist_code[];
#endif
# define _tr_tally_lit(s, c, flush) \
{ uch cc = (c); \
s->d_buf[s->last_lit] = 0; \
s->l_buf[s->last_lit++] = cc; \
s->dyn_ltree[cc].Freq++; \
flush = (s->last_lit == s->lit_bufsize-1); \
}
# define _tr_tally_dist(s, distance, length, flush) \
{ uch len = (length); \
ush dist = (distance); \
s->d_buf[s->last_lit] = dist; \
s->l_buf[s->last_lit++] = len; \
dist--; \
s->dyn_ltree[_length_code[len]+LITERALS+1].Freq++; \
s->dyn_dtree[d_code(dist)].Freq++; \
flush = (s->last_lit == s->lit_bufsize-1); \
}
#else
# define _tr_tally_lit(s, c, flush) flush = _tr_tally(s, 0, c)
# define _tr_tally_dist(s, distance, length, flush) \
flush = _tr_tally(s, distance, length)
#endif
#endif /* DEFLATE_H */
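
The bi_buf/bi_valid pair and Buf_size described above implement a small LSB-first bit queue: code bits are ORed in at the current bit position and full 16-bit chunks are flushed to the output low byte first. A toy illustration of that scheme (not zlib's code -- the real thing is the send_bits macro in trees.c operating on deflate_state, and the final partial-buffer flush that zlib's bi_windup() performs is omitted here):

#include <stdio.h>

typedef struct {
    unsigned short bi_buf;   /* pending bits, inserted at the bottom */
    int bi_valid;            /* number of valid bits in bi_buf */
    FILE *out;
} bitwriter;

static void put_bits(bitwriter *w, unsigned value, int length)
{
    if (w->bi_valid > 16 - length) {
        /* buffer would overflow: top up, flush 16 bits, keep the spill */
        w->bi_buf |= (unsigned short)(value << w->bi_valid);
        fputc(w->bi_buf & 0xff, w->out);          /* low byte first */
        fputc((w->bi_buf >> 8) & 0xff, w->out);
        w->bi_buf = (unsigned short)(value >> (16 - w->bi_valid));
        w->bi_valid += length - 16;
    } else {
        w->bi_buf |= (unsigned short)(value << w->bi_valid);
        w->bi_valid += length;
    }
}

int main(void)
{
    bitwriter w = { 0, 0, stdout };
    put_bits(&w, 0x5, 3);     /* example codes; the values are arbitrary */
    put_bits(&w, 0x1ff, 9);
    put_bits(&w, 0x3f, 6);    /* 18 bits total: 16 flushed, 2 left pending */
    return 0;
}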


@@ -1,209 +0,0 @@
/* gzguts.h -- zlib internal header definitions for gz* operations
* Copyright (C) 2004, 2005, 2010, 2011, 2012, 2013 Mark Adler
* For conditions of distribution and use, see copyright notice in zlib.h
*/
#ifdef _LARGEFILE64_SOURCE
# ifndef _LARGEFILE_SOURCE
# define _LARGEFILE_SOURCE 1
# endif
# ifdef _FILE_OFFSET_BITS
# undef _FILE_OFFSET_BITS
# endif
#endif
#ifdef HAVE_HIDDEN
# define ZLIB_INTERNAL __attribute__((visibility ("hidden")))
#else
# define ZLIB_INTERNAL
#endif
#include <stdio.h>
#include "zlib.h"
#ifdef STDC
# include <string.h>
# include <stdlib.h>
# include <limits.h>
#endif
#include <fcntl.h>
#ifdef _WIN32
# include <stddef.h>
#endif
#if defined(__TURBOC__) || defined(_MSC_VER) || defined(_WIN32)
# include <io.h>
#endif
#ifdef WINAPI_FAMILY
# define open _open
# define read _read
# define write _write
# define close _close
#endif
#ifdef NO_DEFLATE /* for compatibility with old definition */
# define NO_GZCOMPRESS
#endif
#if defined(STDC99) || (defined(__TURBOC__) && __TURBOC__ >= 0x550)
# ifndef HAVE_VSNPRINTF
# define HAVE_VSNPRINTF
# endif
#endif
#if defined(__CYGWIN__)
# ifndef HAVE_VSNPRINTF
# define HAVE_VSNPRINTF
# endif
#endif
#if defined(MSDOS) && defined(__BORLANDC__) && (BORLANDC > 0x410)
# ifndef HAVE_VSNPRINTF
# define HAVE_VSNPRINTF
# endif
#endif
#ifndef HAVE_VSNPRINTF
# ifdef MSDOS
/* vsnprintf may exist on some MS-DOS compilers (DJGPP?),
but for now we just assume it doesn't. */
# define NO_vsnprintf
# endif
# ifdef __TURBOC__
# define NO_vsnprintf
# endif
# ifdef WIN32
/* In Win32, vsnprintf is available as the "non-ANSI" _vsnprintf. */
# if !defined(vsnprintf) && !defined(NO_vsnprintf)
# if !defined(_MSC_VER) || ( defined(_MSC_VER) && _MSC_VER < 1500 )
# define vsnprintf _vsnprintf
# endif
# endif
# endif
# ifdef __SASC
# define NO_vsnprintf
# endif
# ifdef VMS
# define NO_vsnprintf
# endif
# ifdef __OS400__
# define NO_vsnprintf
# endif
# ifdef __MVS__
# define NO_vsnprintf
# endif
#endif
/* unlike snprintf (which is required in C99, yet still not supported by
Microsoft more than a decade later!), _snprintf does not guarantee null
termination of the result -- however this is only used in gzlib.c where
the result is assured to fit in the space provided */
#ifdef _MSC_VER
# define snprintf _snprintf
#endif
#ifndef local
# define local static
#endif
/* compile with -Dlocal if your debugger can't find static symbols */
/* gz* functions always use library allocation functions */
#ifndef STDC
extern voidp malloc OF((uInt size));
extern void free OF((voidpf ptr));
#endif
/* get errno and strerror definition */
#if defined UNDER_CE
# include <windows.h>
# define zstrerror() gz_strwinerror((DWORD)GetLastError())
#else
# ifndef NO_STRERROR
# include <errno.h>
# define zstrerror() strerror(errno)
# else
# define zstrerror() "stdio error (consult errno)"
# endif
#endif
/* provide prototypes for these when building zlib without LFS */
#if !defined(_LARGEFILE64_SOURCE) || _LFS64_LARGEFILE-0 == 0
ZEXTERN gzFile ZEXPORT gzopen64 OF((const char *, const char *));
ZEXTERN z_off64_t ZEXPORT gzseek64 OF((gzFile, z_off64_t, int));
ZEXTERN z_off64_t ZEXPORT gztell64 OF((gzFile));
ZEXTERN z_off64_t ZEXPORT gzoffset64 OF((gzFile));
#endif
/* default memLevel */
#if MAX_MEM_LEVEL >= 8
# define DEF_MEM_LEVEL 8
#else
# define DEF_MEM_LEVEL MAX_MEM_LEVEL
#endif
/* default i/o buffer size -- double this for output when reading (this and
twice this must be able to fit in an unsigned type) */
#define GZBUFSIZE 8192
/* gzip modes, also provide a little integrity check on the passed structure */
#define GZ_NONE 0
#define GZ_READ 7247
#define GZ_WRITE 31153
#define GZ_APPEND 1 /* mode set to GZ_WRITE after the file is opened */
/* values for gz_state how */
#define LOOK 0 /* look for a gzip header */
#define COPY 1 /* copy input directly */
#define GZIP 2 /* decompress a gzip stream */
/* internal gzip file state data structure */
typedef struct {
/* exposed contents for gzgetc() macro */
struct gzFile_s x; /* "x" for exposed */
/* x.have: number of bytes available at x.next */
/* x.next: next output data to deliver or write */
/* x.pos: current position in uncompressed data */
/* used for both reading and writing */
int mode; /* see gzip modes above */
int fd; /* file descriptor */
char *path; /* path or fd for error messages */
unsigned size; /* buffer size, zero if not allocated yet */
unsigned want; /* requested buffer size, default is GZBUFSIZE */
unsigned char *in; /* input buffer */
unsigned char *out; /* output buffer (double-sized when reading) */
int direct; /* 0 if processing gzip, 1 if transparent */
/* just for reading */
int how; /* 0: get header, 1: copy, 2: decompress */
z_off64_t start; /* where the gzip data started, for rewinding */
int eof; /* true if end of input file reached */
int past; /* true if read requested past end */
/* just for writing */
int level; /* compression level */
int strategy; /* compression strategy */
/* seek request */
z_off64_t skip; /* amount to skip (already rewound if backwards) */
int seek; /* true if seek request pending */
/* error information */
int err; /* error code */
char *msg; /* error message */
/* zlib inflate or deflate stream */
z_stream strm; /* stream structure in-place (not a pointer) */
} gz_state;
typedef gz_state FAR *gz_statep;
/* shared functions */
void ZLIB_INTERNAL gz_error OF((gz_statep, int, const char *));
#if defined UNDER_CE
char ZLIB_INTERNAL *gz_strwinerror OF((DWORD error));
#endif
/* GT_OFF(x), where x is an unsigned value, is true if x > maximum z_off64_t
value -- needed when comparing unsigned to z_off64_t, which is signed
(possible z_off64_t types off_t, off64_t, and long are all signed) */
#ifdef INT_MAX
# define GT_OFF(x) (sizeof(int) == sizeof(z_off64_t) && (x) > INT_MAX)
#else
unsigned ZLIB_INTERNAL gz_intmax OF((void));
# define GT_OFF(x) (sizeof(int) == sizeof(z_off64_t) && (x) > gz_intmax())
#endif
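
This header only describes the machinery behind zlib's public gz* layer; a minimal usage sketch of that public API (file name and data are illustrative, error handling trimmed):

#include <stdio.h>
#include <zlib.h>

int main(void)
{
    char buf[8192];          /* same size as the default GZBUFSIZE above */
    int n;

    /* write a gzip file, then read it back through the same layer */
    gzFile out = gzopen("example.gz", "wb9");    /* "9" = best compression */
    if (out == NULL) return 1;
    gzputs(out, "hello from the gz* layer\n");
    gzclose(out);

    gzFile in = gzopen("example.gz", "rb");
    if (in == NULL) return 1;
    while ((n = gzread(in, buf, sizeof(buf))) > 0)
        fwrite(buf, 1, (size_t)n, stdout);
    gzclose(in);
    return 0;
}

The direct field in the state above is what lets the read path fall back to transparent copying when the input turns out not to be gzip data at all.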


@@ -1,640 +0,0 @@
/* infback.c -- inflate using a call-back interface
* Copyright (C) 1995-2011 Mark Adler
* For conditions of distribution and use, see copyright notice in zlib.h
*/
/*
This code is largely copied from inflate.c. Normally either infback.o or
inflate.o would be linked into an application--not both. The interface
with inffast.c is retained so that optimized assembler-coded versions of
inflate_fast() can be used with either inflate.c or infback.c.
*/
#include "zutil.h"
#include "inftrees.h"
#include "inflate.h"
#include "inffast.h"
/* function prototypes */
local void fixedtables OF((struct inflate_state FAR *state));
/*
strm provides memory allocation functions in zalloc and zfree, or
Z_NULL to use the library memory allocation functions.
windowBits is in the range 8..15, and window is a user-supplied
window and output buffer that is 2**windowBits bytes.
*/
int ZEXPORT inflateBackInit_(strm, windowBits, window, version, stream_size)
z_streamp strm;
int windowBits;
unsigned char FAR *window;
const char *version;
int stream_size;
{
struct inflate_state FAR *state;
if (version == Z_NULL || version[0] != ZLIB_VERSION[0] ||
stream_size != (int)(sizeof(z_stream)))
return Z_VERSION_ERROR;
if (strm == Z_NULL || window == Z_NULL ||
windowBits < 8 || windowBits > 15)
return Z_STREAM_ERROR;
strm->msg = Z_NULL; /* in case we return an error */
if (strm->zalloc == (alloc_func)0) {
#ifdef Z_SOLO
return Z_STREAM_ERROR;
#else
strm->zalloc = zcalloc;
strm->opaque = (voidpf)0;
#endif
}
if (strm->zfree == (free_func)0)
#ifdef Z_SOLO
return Z_STREAM_ERROR;
#else
strm->zfree = zcfree;
#endif
state = (struct inflate_state FAR *)ZALLOC(strm, 1,
sizeof(struct inflate_state));
if (state == Z_NULL) return Z_MEM_ERROR;
Tracev((stderr, "inflate: allocated\n"));
strm->state = (struct internal_state FAR *)state;
state->dmax = 32768U;
state->wbits = windowBits;
state->wsize = 1U << windowBits;
state->window = window;
state->wnext = 0;
state->whave = 0;
return Z_OK;
}
/*
Return state with length and distance decoding tables and index sizes set to
fixed code decoding. Normally this returns fixed tables from inffixed.h.
If BUILDFIXED is defined, then instead this routine builds the tables the
first time it's called, and returns those tables the first time and
thereafter. This reduces the size of the code by about 2K bytes, in
exchange for a little execution time. However, BUILDFIXED should not be
used for threaded applications, since the rewriting of the tables and of the
virgin flag may not be thread-safe.
*/
local void fixedtables(state)
struct inflate_state FAR *state;
{
#ifdef BUILDFIXED
static int virgin = 1;
static code *lenfix, *distfix;
static code fixed[544];
/* build fixed huffman tables if first call (may not be thread safe) */
if (virgin) {
unsigned sym, bits;
static code *next;
/* literal/length table */
sym = 0;
while (sym < 144) state->lens[sym++] = 8;
while (sym < 256) state->lens[sym++] = 9;
while (sym < 280) state->lens[sym++] = 7;
while (sym < 288) state->lens[sym++] = 8;
next = fixed;
lenfix = next;
bits = 9;
inflate_table(LENS, state->lens, 288, &(next), &(bits), state->work);
/* distance table */
sym = 0;
while (sym < 32) state->lens[sym++] = 5;
distfix = next;
bits = 5;
inflate_table(DISTS, state->lens, 32, &(next), &(bits), state->work);
/* do this just once */
virgin = 0;
}
#else /* !BUILDFIXED */
# include "inffixed.h"
#endif /* BUILDFIXED */
state->lencode = lenfix;
state->lenbits = 9;
state->distcode = distfix;
state->distbits = 5;
}
/* Macros for inflateBack(): */
/* Load returned state from inflate_fast() */
#define LOAD() \
do { \
put = strm->next_out; \
left = strm->avail_out; \
next = strm->next_in; \
have = strm->avail_in; \
hold = state->hold; \
bits = state->bits; \
} while (0)
/* Set state from registers for inflate_fast() */
#define RESTORE() \
do { \
strm->next_out = put; \
strm->avail_out = left; \
strm->next_in = next; \
strm->avail_in = have; \
state->hold = hold; \
state->bits = bits; \
} while (0)
/* Clear the input bit accumulator */
#define INITBITS() \
do { \
hold = 0; \
bits = 0; \
} while (0)
/* Assure that some input is available. If input is requested, but denied,
then return a Z_BUF_ERROR from inflateBack(). */
#define PULL() \
do { \
if (have == 0) { \
have = in(in_desc, &next); \
if (have == 0) { \
next = Z_NULL; \
ret = Z_BUF_ERROR; \
goto inf_leave; \
} \
} \
} while (0)
/* Get a byte of input into the bit accumulator, or return from inflateBack()
with an error if there is no input available. */
#define PULLBYTE() \
do { \
PULL(); \
have--; \
hold += (unsigned long)(*next++) << bits; \
bits += 8; \
} while (0)
/* Assure that there are at least n bits in the bit accumulator. If there is
not enough available input to do that, then return from inflateBack() with
an error. */
#define NEEDBITS(n) \
do { \
while (bits < (unsigned)(n)) \
PULLBYTE(); \
} while (0)
/* Return the low n bits of the bit accumulator (n < 16) */
#define BITS(n) \
((unsigned)hold & ((1U << (n)) - 1))
/* Remove n bits from the bit accumulator */
#define DROPBITS(n) \
do { \
hold >>= (n); \
bits -= (unsigned)(n); \
} while (0)
/* Remove zero to seven bits as needed to go to a byte boundary */
#define BYTEBITS() \
do { \
hold >>= bits & 7; \
bits -= bits & 7; \
} while (0)
/* Assure that some output space is available, by writing out the window
if it's full. If the write fails, return from inflateBack() with a
Z_BUF_ERROR. */
#define ROOM() \
do { \
if (left == 0) { \
put = state->window; \
left = state->wsize; \
state->whave = left; \
if (out(out_desc, put, left)) { \
ret = Z_BUF_ERROR; \
goto inf_leave; \
} \
} \
} while (0)
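
As a self-contained illustration of the hold/bits accumulator these macros manage (plain C rather than the macros themselves), the sketch below pulls a byte into the accumulator and peels off a deflate block header the same way the TYPE case further down does with NEEDBITS(3), BITS(1) and BITS(2); the input byte is made up for the example:

#include <stdio.h>

int main(void)
{
    /* 0x05 = binary 0000_0101: BFINAL = 1, BTYPE = 10 (dynamic block) */
    const unsigned char input[] = { 0x05 };
    const unsigned char *next = input;
    unsigned long hold = 0;   /* bit accumulator, filled LSB first */
    unsigned bits = 0;        /* number of valid bits in hold */

    while (bits < 3) {                         /* NEEDBITS(3) */
        hold += (unsigned long)(*next++) << bits;
        bits += 8;                             /* ... via PULLBYTE() */
    }
    unsigned last = hold & 1;                  /* BITS(1) */
    hold >>= 1; bits -= 1;                     /* DROPBITS(1) */
    unsigned type = hold & 3;                  /* BITS(2) */
    hold >>= 2; bits -= 2;                     /* DROPBITS(2) */

    printf("last=%u type=%u\n", last, type);   /* prints last=1 type=2 */
    return 0;
}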
/*
strm provides the memory allocation functions and window buffer on input,
and provides information on the unused input on return. For Z_DATA_ERROR
returns, strm will also provide an error message.
in() and out() are the call-back input and output functions. When
inflateBack() needs more input, it calls in(). When inflateBack() has
filled the window with output, or when it completes with data in the
window, it calls out() to write out the data. The application must not
change the provided input until in() is called again or inflateBack()
returns. The application must not change the window/output buffer until
inflateBack() returns.
in() and out() are called with a descriptor parameter provided in the
inflateBack() call. This parameter can be a structure that provides the
information required to do the read or write, as well as accumulated
information on the input and output such as totals and check values.
in() should return zero on failure. out() should return non-zero on
failure. If either in() or out() fails, then inflateBack() returns a
Z_BUF_ERROR. strm->next_in can be checked for Z_NULL to see whether it
was in() or out() that caused the error. Otherwise, inflateBack()
returns Z_STREAM_END on success, Z_DATA_ERROR for a deflate format
error, or Z_MEM_ERROR if it could not allocate memory for the state.
inflateBack() can also return Z_STREAM_ERROR if the input parameters
are not correct, i.e. strm is Z_NULL or the state was not initialized.
*/
int ZEXPORT inflateBack(strm, in, in_desc, out, out_desc)
z_streamp strm;
in_func in;
void FAR *in_desc;
out_func out;
void FAR *out_desc;
{
struct inflate_state FAR *state;
z_const unsigned char FAR *next; /* next input */
unsigned char FAR *put; /* next output */
unsigned have, left; /* available input and output */
unsigned long hold; /* bit buffer */
unsigned bits; /* bits in bit buffer */
unsigned copy; /* number of stored or match bytes to copy */
unsigned char FAR *from; /* where to copy match bytes from */
code here; /* current decoding table entry */
code last; /* parent table entry */
unsigned len; /* length to copy for repeats, bits to drop */
int ret; /* return code */
static const unsigned short order[19] = /* permutation of code lengths */
{16, 17, 18, 0, 8, 7, 9, 6, 10, 5, 11, 4, 12, 3, 13, 2, 14, 1, 15};
/* Check that the strm exists and that the state was initialized */
if (strm == Z_NULL || strm->state == Z_NULL)
return Z_STREAM_ERROR;
state = (struct inflate_state FAR *)strm->state;
/* Reset the state */
strm->msg = Z_NULL;
state->mode = TYPE;
state->last = 0;
state->whave = 0;
next = strm->next_in;
have = next != Z_NULL ? strm->avail_in : 0;
hold = 0;
bits = 0;
put = state->window;
left = state->wsize;
/* Inflate until end of block marked as last */
for (;;)
switch (state->mode) {
case TYPE:
/* determine and dispatch block type */
if (state->last) {
BYTEBITS();
state->mode = DONE;
break;
}
NEEDBITS(3);
state->last = BITS(1);
DROPBITS(1);
switch (BITS(2)) {
case 0: /* stored block */
Tracev((stderr, "inflate: stored block%s\n",
state->last ? " (last)" : ""));
state->mode = STORED;
break;
case 1: /* fixed block */
fixedtables(state);
Tracev((stderr, "inflate: fixed codes block%s\n",
state->last ? " (last)" : ""));
state->mode = LEN; /* decode codes */
break;
case 2: /* dynamic block */
Tracev((stderr, "inflate: dynamic codes block%s\n",
state->last ? " (last)" : ""));
state->mode = TABLE;
break;
case 3:
strm->msg = (char *)"invalid block type";
state->mode = BAD;
}
DROPBITS(2);
break;
case STORED:
/* get and verify stored block length */
BYTEBITS(); /* go to byte boundary */
NEEDBITS(32);
if ((hold & 0xffff) != ((hold >> 16) ^ 0xffff)) {
strm->msg = (char *)"invalid stored block lengths";
state->mode = BAD;
break;
}
state->length = (unsigned)hold & 0xffff;
Tracev((stderr, "inflate: stored length %u\n",
state->length));
INITBITS();
/* copy stored block from input to output */
while (state->length != 0) {
copy = state->length;
PULL();
ROOM();
if (copy > have) copy = have;
if (copy > left) copy = left;
zmemcpy(put, next, copy);
have -= copy;
next += copy;
left -= copy;
put += copy;
state->length -= copy;
}
Tracev((stderr, "inflate: stored end\n"));
state->mode = TYPE;
break;
case TABLE:
/* get dynamic table entries descriptor */
NEEDBITS(14);
state->nlen = BITS(5) + 257;
DROPBITS(5);
state->ndist = BITS(5) + 1;
DROPBITS(5);
state->ncode = BITS(4) + 4;
DROPBITS(4);
#ifndef PKZIP_BUG_WORKAROUND
if (state->nlen > 286 || state->ndist > 30) {
strm->msg = (char *)"too many length or distance symbols";
state->mode = BAD;
break;
}
#endif
Tracev((stderr, "inflate: table sizes ok\n"));
/* get code length code lengths (not a typo) */
state->have = 0;
while (state->have < state->ncode) {
NEEDBITS(3);
state->lens[order[state->have++]] = (unsigned short)BITS(3);
DROPBITS(3);
}
while (state->have < 19)
state->lens[order[state->have++]] = 0;
state->next = state->codes;
state->lencode = (code const FAR *)(state->next);
state->lenbits = 7;
ret = inflate_table(CODES, state->lens, 19, &(state->next),
&(state->lenbits), state->work);
if (ret) {
strm->msg = (char *)"invalid code lengths set";
state->mode = BAD;
break;
}
Tracev((stderr, "inflate: code lengths ok\n"));
/* get length and distance code code lengths */
state->have = 0;
while (state->have < state->nlen + state->ndist) {
for (;;) {
here = state->lencode[BITS(state->lenbits)];
if ((unsigned)(here.bits) <= bits) break;
PULLBYTE();
}
if (here.val < 16) {
DROPBITS(here.bits);
state->lens[state->have++] = here.val;
}
else {
if (here.val == 16) {
NEEDBITS(here.bits + 2);
DROPBITS(here.bits);
if (state->have == 0) {
strm->msg = (char *)"invalid bit length repeat";
state->mode = BAD;
break;
}
len = (unsigned)(state->lens[state->have - 1]);
copy = 3 + BITS(2);
DROPBITS(2);
}
else if (here.val == 17) {
NEEDBITS(here.bits + 3);
DROPBITS(here.bits);
len = 0;
copy = 3 + BITS(3);
DROPBITS(3);
}
else {
NEEDBITS(here.bits + 7);
DROPBITS(here.bits);
len = 0;
copy = 11 + BITS(7);
DROPBITS(7);
}
if (state->have + copy > state->nlen + state->ndist) {
strm->msg = (char *)"invalid bit length repeat";
state->mode = BAD;
break;
}
while (copy--)
state->lens[state->have++] = (unsigned short)len;
}
}
/* handle error breaks in while */
if (state->mode == BAD) break;
/* check for end-of-block code (better have one) */
if (state->lens[256] == 0) {
strm->msg = (char *)"invalid code -- missing end-of-block";
state->mode = BAD;
break;
}
/* build code tables -- note: do not change the lenbits or distbits
values here (9 and 6) without reading the comments in inftrees.h
concerning the ENOUGH constants, which depend on those values */
state->next = state->codes;
state->lencode = (code const FAR *)(state->next);
state->lenbits = 9;
ret = inflate_table(LENS, state->lens, state->nlen, &(state->next),
&(state->lenbits), state->work);
if (ret) {
strm->msg = (char *)"invalid literal/lengths set";
state->mode = BAD;
break;
}
state->distcode = (code const FAR *)(state->next);
state->distbits = 6;
ret = inflate_table(DISTS, state->lens + state->nlen, state->ndist,
&(state->next), &(state->distbits), state->work);
if (ret) {
strm->msg = (char *)"invalid distances set";
state->mode = BAD;
break;
}
Tracev((stderr, "inflate: codes ok\n"));
state->mode = LEN;
case LEN:
/* use inflate_fast() if we have enough input and output */
if (have >= 6 && left >= 258) {
RESTORE();
if (state->whave < state->wsize)
state->whave = state->wsize - left;
inflate_fast(strm, state->wsize);
LOAD();
break;
}
/* get a literal, length, or end-of-block code */
for (;;) {
here = state->lencode[BITS(state->lenbits)];
if ((unsigned)(here.bits) <= bits) break;
PULLBYTE();
}
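/* a nonzero op with the high nibble clear is a link to a second-level
   table (the 0000tttt case described in inftrees.h) */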
if (here.op && (here.op & 0xf0) == 0) {
last = here;
for (;;) {
here = state->lencode[last.val +
(BITS(last.bits + last.op) >> last.bits)];
if ((unsigned)(last.bits + here.bits) <= bits) break;
PULLBYTE();
}
DROPBITS(last.bits);
}
DROPBITS(here.bits);
state->length = (unsigned)here.val;
/* process literal */
if (here.op == 0) {
Tracevv((stderr, here.val >= 0x20 && here.val < 0x7f ?
"inflate: literal '%c'\n" :
"inflate: literal 0x%02x\n", here.val));
ROOM();
*put++ = (unsigned char)(state->length);
left--;
state->mode = LEN;
break;
}
/* process end of block */
if (here.op & 32) {
Tracevv((stderr, "inflate: end of block\n"));
state->mode = TYPE;
break;
}
/* invalid code */
if (here.op & 64) {
strm->msg = (char *)"invalid literal/length code";
state->mode = BAD;
break;
}
/* length code -- get extra bits, if any */
state->extra = (unsigned)(here.op) & 15;
if (state->extra != 0) {
NEEDBITS(state->extra);
state->length += BITS(state->extra);
DROPBITS(state->extra);
}
Tracevv((stderr, "inflate: length %u\n", state->length));
/* get distance code */
for (;;) {
here = state->distcode[BITS(state->distbits)];
if ((unsigned)(here.bits) <= bits) break;
PULLBYTE();
}
if ((here.op & 0xf0) == 0) {
last = here;
for (;;) {
here = state->distcode[last.val +
(BITS(last.bits + last.op) >> last.bits)];
if ((unsigned)(last.bits + here.bits) <= bits) break;
PULLBYTE();
}
DROPBITS(last.bits);
}
DROPBITS(here.bits);
if (here.op & 64) {
strm->msg = (char *)"invalid distance code";
state->mode = BAD;
break;
}
state->offset = (unsigned)here.val;
/* get distance extra bits, if any */
state->extra = (unsigned)(here.op) & 15;
if (state->extra != 0) {
NEEDBITS(state->extra);
state->offset += BITS(state->extra);
DROPBITS(state->extra);
}
if (state->offset > state->wsize - (state->whave < state->wsize ?
left : 0)) {
strm->msg = (char *)"invalid distance too far back";
state->mode = BAD;
break;
}
Tracevv((stderr, "inflate: distance %u\n", state->offset));
/* copy match from window to output */
do {
ROOM();
copy = state->wsize - state->offset;
if (copy < left) {
from = put + copy;
copy = left - copy;
}
else {
from = put - state->offset;
copy = left;
}
if (copy > state->length) copy = state->length;
state->length -= copy;
left -= copy;
do {
*put++ = *from++;
} while (--copy);
} while (state->length != 0);
break;
case DONE:
/* inflate stream terminated properly -- write leftover output */
ret = Z_STREAM_END;
if (left < state->wsize) {
if (out(out_desc, state->window, state->wsize - left))
ret = Z_BUF_ERROR;
}
goto inf_leave;
case BAD:
ret = Z_DATA_ERROR;
goto inf_leave;
default: /* can't happen, but makes compilers happy */
ret = Z_STREAM_ERROR;
goto inf_leave;
}
/* Return unused input */
inf_leave:
strm->next_in = next;
strm->avail_in = have;
return ret;
}
int ZEXPORT inflateBackEnd(strm)
z_streamp strm;
{
if (strm == Z_NULL || strm->state == Z_NULL || strm->zfree == (free_func)0)
return Z_STREAM_ERROR;
ZFREE(strm, strm->state);
strm->state = Z_NULL;
Tracev((stderr, "inflate: end\n"));
return Z_OK;
}
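The DONE case above flushes any remaining window contents through the out() callback before returning, and inflateBackEnd() only frees the state. A minimal sketch of driving this callback interface (not part of the deleted sources; the stdio descriptors, buffer sizes, and the 32K window are illustrative assumptions):
/* Sketch: inflate a raw deflate stream from stdin to stdout via inflateBack(). */
#include <stdio.h>
#include "zlib.h"
static unsigned char inbuf[16384];
static unsigned pull(void *desc, z_const unsigned char **buf)
{
    *buf = inbuf;
    return (unsigned)fread(inbuf, 1, sizeof(inbuf), (FILE *)desc);  /* 0 signals end of input */
}
static int push(void *desc, unsigned char *buf, unsigned len)
{
    return fwrite(buf, 1, len, (FILE *)desc) != len;                /* nonzero aborts inflateBack() */
}
int main(void)
{
    static unsigned char window[32768];     /* caller-supplied 1 << windowBits window */
    z_stream strm;
    int ret;
    strm.zalloc = Z_NULL;
    strm.zfree = Z_NULL;
    strm.opaque = Z_NULL;
    if (inflateBackInit(&strm, 15, window) != Z_OK)
        return 1;
    strm.next_in = Z_NULL;                  /* no input yet; pull() is called as needed */
    strm.avail_in = 0;
    ret = inflateBack(&strm, pull, stdin, push, stdout);
    inflateBackEnd(&strm);
    return ret == Z_STREAM_END ? 0 : 1;
}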


@@ -1,340 +0,0 @@
/* inffast.c -- fast decoding
* Copyright (C) 1995-2008, 2010, 2013 Mark Adler
* For conditions of distribution and use, see copyright notice in zlib.h
*/
#include "zutil.h"
#include "inftrees.h"
#include "inflate.h"
#include "inffast.h"
#ifndef ASMINF
/* Allow machine dependent optimization for post-increment or pre-increment.
Based on testing to date,
Pre-increment preferred for:
- PowerPC G3 (Adler)
- MIPS R5000 (Randers-Pehrson)
Post-increment preferred for:
- none
No measurable difference:
- Pentium III (Anderson)
- M68060 (Nikl)
*/
#ifdef POSTINC
# define OFF 0
# define PUP(a) *(a)++
#else
# define OFF 1
# define PUP(a) *++(a)
#endif
/*
Decode literal, length, and distance codes and write out the resulting
literal and match bytes until either not enough input or output is
available, an end-of-block is encountered, or a data error is encountered.
When large enough input and output buffers are supplied to inflate(), for
example, a 16K input buffer and a 64K output buffer, more than 95% of the
inflate execution time is spent in this routine.
Entry assumptions:
state->mode == LEN
strm->avail_in >= 6
strm->avail_out >= 258
start >= strm->avail_out
state->bits < 8
On return, state->mode is one of:
LEN -- ran out of output space or available input
TYPE -- reached end of block code, inflate() to interpret next block
BAD -- error in block data
Notes:
- The maximum input bits used by a length/distance pair is 15 bits for the
length code, 5 bits for the length extra, 15 bits for the distance code,
and 13 bits for the distance extra. This totals 48 bits, or six bytes.
Therefore if strm->avail_in >= 6, then there is enough input to avoid
checking for available input while decoding.
- The maximum bytes that a single length/distance pair can output is 258
bytes, which is the maximum length that can be coded. inflate_fast()
requires strm->avail_out >= 258 for each loop to avoid checking for
output space.
*/
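A quick restatement of the worst-case arithmetic in the notes above (a sketch, not zlib source): 15 + 5 + 15 + 13 bits round up to six bytes, which is why avail_in >= 6 suffices.
enum {
    MAX_PAIR_BITS  = 15 + 5 + 15 + 13,         /* length code + extra, distance code + extra */
    MAX_PAIR_BYTES = (MAX_PAIR_BITS + 7) / 8   /* 48 bits round up to 6 bytes */
};
typedef char assert_six_byte_bound[MAX_PAIR_BYTES == 6 ? 1 : -1];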
void ZLIB_INTERNAL inflate_fast(strm, start)
z_streamp strm;
unsigned start; /* inflate()'s starting value for strm->avail_out */
{
struct inflate_state FAR *state;
z_const unsigned char FAR *in; /* local strm->next_in */
z_const unsigned char FAR *last; /* have enough input while in < last */
unsigned char FAR *out; /* local strm->next_out */
unsigned char FAR *beg; /* inflate()'s initial strm->next_out */
unsigned char FAR *end; /* while out < end, enough space available */
#ifdef INFLATE_STRICT
unsigned dmax; /* maximum distance from zlib header */
#endif
unsigned wsize; /* window size or zero if not using window */
unsigned whave; /* valid bytes in the window */
unsigned wnext; /* window write index */
unsigned char FAR *window; /* allocated sliding window, if wsize != 0 */
unsigned long hold; /* local strm->hold */
unsigned bits; /* local strm->bits */
code const FAR *lcode; /* local strm->lencode */
code const FAR *dcode; /* local strm->distcode */
unsigned lmask; /* mask for first level of length codes */
unsigned dmask; /* mask for first level of distance codes */
code here; /* retrieved table entry */
unsigned op; /* code bits, operation, extra bits, or */
/* window position, window bytes to copy */
unsigned len; /* match length, unused bytes */
unsigned dist; /* match distance */
unsigned char FAR *from; /* where to copy match from */
/* copy state to local variables */
state = (struct inflate_state FAR *)strm->state;
in = strm->next_in - OFF;
last = in + (strm->avail_in - 5);
out = strm->next_out - OFF;
beg = out - (start - strm->avail_out);
end = out + (strm->avail_out - 257);
#ifdef INFLATE_STRICT
dmax = state->dmax;
#endif
wsize = state->wsize;
whave = state->whave;
wnext = state->wnext;
window = state->window;
hold = state->hold;
bits = state->bits;
lcode = state->lencode;
dcode = state->distcode;
lmask = (1U << state->lenbits) - 1;
dmask = (1U << state->distbits) - 1;
/* decode literals and length/distances until end-of-block or not enough
input data or output space */
do {
if (bits < 15) {
hold += (unsigned long)(PUP(in)) << bits;
bits += 8;
hold += (unsigned long)(PUP(in)) << bits;
bits += 8;
}
here = lcode[hold & lmask];
dolen:
op = (unsigned)(here.bits);
hold >>= op;
bits -= op;
op = (unsigned)(here.op);
if (op == 0) { /* literal */
Tracevv((stderr, here.val >= 0x20 && here.val < 0x7f ?
"inflate: literal '%c'\n" :
"inflate: literal 0x%02x\n", here.val));
PUP(out) = (unsigned char)(here.val);
}
else if (op & 16) { /* length base */
len = (unsigned)(here.val);
op &= 15; /* number of extra bits */
if (op) {
if (bits < op) {
hold += (unsigned long)(PUP(in)) << bits;
bits += 8;
}
len += (unsigned)hold & ((1U << op) - 1);
hold >>= op;
bits -= op;
}
Tracevv((stderr, "inflate: length %u\n", len));
if (bits < 15) {
hold += (unsigned long)(PUP(in)) << bits;
bits += 8;
hold += (unsigned long)(PUP(in)) << bits;
bits += 8;
}
here = dcode[hold & dmask];
dodist:
op = (unsigned)(here.bits);
hold >>= op;
bits -= op;
op = (unsigned)(here.op);
if (op & 16) { /* distance base */
dist = (unsigned)(here.val);
op &= 15; /* number of extra bits */
if (bits < op) {
hold += (unsigned long)(PUP(in)) << bits;
bits += 8;
if (bits < op) {
hold += (unsigned long)(PUP(in)) << bits;
bits += 8;
}
}
dist += (unsigned)hold & ((1U << op) - 1);
#ifdef INFLATE_STRICT
if (dist > dmax) {
strm->msg = (char *)"invalid distance too far back";
state->mode = BAD;
break;
}
#endif
hold >>= op;
bits -= op;
Tracevv((stderr, "inflate: distance %u\n", dist));
op = (unsigned)(out - beg); /* max distance in output */
if (dist > op) { /* see if copy from window */
op = dist - op; /* distance back in window */
if (op > whave) {
if (state->sane) {
strm->msg =
(char *)"invalid distance too far back";
state->mode = BAD;
break;
}
#ifdef INFLATE_ALLOW_INVALID_DISTANCE_TOOFAR_ARRR
if (len <= op - whave) {
do {
PUP(out) = 0;
} while (--len);
continue;
}
len -= op - whave;
do {
PUP(out) = 0;
} while (--op > whave);
if (op == 0) {
from = out - dist;
do {
PUP(out) = PUP(from);
} while (--len);
continue;
}
#endif
}
from = window - OFF;
if (wnext == 0) { /* very common case */
from += wsize - op;
if (op < len) { /* some from window */
len -= op;
do {
PUP(out) = PUP(from);
} while (--op);
from = out - dist; /* rest from output */
}
}
else if (wnext < op) { /* wrap around window */
from += wsize + wnext - op;
op -= wnext;
if (op < len) { /* some from end of window */
len -= op;
do {
PUP(out) = PUP(from);
} while (--op);
from = window - OFF;
if (wnext < len) { /* some from start of window */
op = wnext;
len -= op;
do {
PUP(out) = PUP(from);
} while (--op);
from = out - dist; /* rest from output */
}
}
}
else { /* contiguous in window */
from += wnext - op;
if (op < len) { /* some from window */
len -= op;
do {
PUP(out) = PUP(from);
} while (--op);
from = out - dist; /* rest from output */
}
}
while (len > 2) {
PUP(out) = PUP(from);
PUP(out) = PUP(from);
PUP(out) = PUP(from);
len -= 3;
}
if (len) {
PUP(out) = PUP(from);
if (len > 1)
PUP(out) = PUP(from);
}
}
else {
from = out - dist; /* copy direct from output */
do { /* minimum length is three */
PUP(out) = PUP(from);
PUP(out) = PUP(from);
PUP(out) = PUP(from);
len -= 3;
} while (len > 2);
if (len) {
PUP(out) = PUP(from);
if (len > 1)
PUP(out) = PUP(from);
}
}
}
else if ((op & 64) == 0) { /* 2nd level distance code */
here = dcode[here.val + (hold & ((1U << op) - 1))];
goto dodist;
}
else {
strm->msg = (char *)"invalid distance code";
state->mode = BAD;
break;
}
}
else if ((op & 64) == 0) { /* 2nd level length code */
here = lcode[here.val + (hold & ((1U << op) - 1))];
goto dolen;
}
else if (op & 32) { /* end-of-block */
Tracevv((stderr, "inflate: end of block\n"));
state->mode = TYPE;
break;
}
else {
strm->msg = (char *)"invalid literal/length code";
state->mode = BAD;
break;
}
} while (in < last && out < end);
/* return unused bytes (on entry, bits < 8, so in won't go too far back) */
len = bits >> 3;
in -= len;
bits -= len << 3;
hold &= (1U << bits) - 1;
/* update state and return */
strm->next_in = in + OFF;
strm->next_out = out + OFF;
strm->avail_in = (unsigned)(in < last ? 5 + (last - in) : 5 - (in - last));
strm->avail_out = (unsigned)(out < end ?
257 + (end - out) : 257 - (out - end));
state->hold = hold;
state->bits = bits;
return;
}
/*
inflate_fast() speedups that turned out slower (on a PowerPC G3 750CXe):
- Using bit fields for code structure
- Different op definition to avoid & for extra bits (do & for table bits)
- Three separate decoding do-loops for direct, window, and wnext == 0
- Special case for distance > 1 copies to do overlapped load and store copy
- Explicit branch predictions (based on measured branch probabilities)
- Deferring match copy and interspersed it with decoding subsequent codes
- Swapping literal/length else
- Swapping window/direct else
- Larger unrolled copy loops (three is about right)
- Moving len -= 3 statement into middle of loop
*/
#endif /* !ASMINF */


@@ -1,11 +0,0 @@
/* inffast.h -- header to use inffast.c
* Copyright (C) 1995-2003, 2010 Mark Adler
* For conditions of distribution and use, see copyright notice in zlib.h
*/
/* WARNING: this file should *not* be used by applications. It is
part of the implementation of the compression library and is
subject to change. Applications should only use zlib.h.
*/
void ZLIB_INTERNAL inflate_fast OF((z_streamp strm, unsigned start));


@@ -1,94 +0,0 @@
/* inffixed.h -- table for decoding fixed codes
* Generated automatically by makefixed().
*/
/* WARNING: this file should *not* be used by applications.
It is part of the implementation of this library and is
subject to change. Applications should only use zlib.h.
*/
static const code lenfix[512] = {
{96,7,0},{0,8,80},{0,8,16},{20,8,115},{18,7,31},{0,8,112},{0,8,48},
{0,9,192},{16,7,10},{0,8,96},{0,8,32},{0,9,160},{0,8,0},{0,8,128},
{0,8,64},{0,9,224},{16,7,6},{0,8,88},{0,8,24},{0,9,144},{19,7,59},
{0,8,120},{0,8,56},{0,9,208},{17,7,17},{0,8,104},{0,8,40},{0,9,176},
{0,8,8},{0,8,136},{0,8,72},{0,9,240},{16,7,4},{0,8,84},{0,8,20},
{21,8,227},{19,7,43},{0,8,116},{0,8,52},{0,9,200},{17,7,13},{0,8,100},
{0,8,36},{0,9,168},{0,8,4},{0,8,132},{0,8,68},{0,9,232},{16,7,8},
{0,8,92},{0,8,28},{0,9,152},{20,7,83},{0,8,124},{0,8,60},{0,9,216},
{18,7,23},{0,8,108},{0,8,44},{0,9,184},{0,8,12},{0,8,140},{0,8,76},
{0,9,248},{16,7,3},{0,8,82},{0,8,18},{21,8,163},{19,7,35},{0,8,114},
{0,8,50},{0,9,196},{17,7,11},{0,8,98},{0,8,34},{0,9,164},{0,8,2},
{0,8,130},{0,8,66},{0,9,228},{16,7,7},{0,8,90},{0,8,26},{0,9,148},
{20,7,67},{0,8,122},{0,8,58},{0,9,212},{18,7,19},{0,8,106},{0,8,42},
{0,9,180},{0,8,10},{0,8,138},{0,8,74},{0,9,244},{16,7,5},{0,8,86},
{0,8,22},{64,8,0},{19,7,51},{0,8,118},{0,8,54},{0,9,204},{17,7,15},
{0,8,102},{0,8,38},{0,9,172},{0,8,6},{0,8,134},{0,8,70},{0,9,236},
{16,7,9},{0,8,94},{0,8,30},{0,9,156},{20,7,99},{0,8,126},{0,8,62},
{0,9,220},{18,7,27},{0,8,110},{0,8,46},{0,9,188},{0,8,14},{0,8,142},
{0,8,78},{0,9,252},{96,7,0},{0,8,81},{0,8,17},{21,8,131},{18,7,31},
{0,8,113},{0,8,49},{0,9,194},{16,7,10},{0,8,97},{0,8,33},{0,9,162},
{0,8,1},{0,8,129},{0,8,65},{0,9,226},{16,7,6},{0,8,89},{0,8,25},
{0,9,146},{19,7,59},{0,8,121},{0,8,57},{0,9,210},{17,7,17},{0,8,105},
{0,8,41},{0,9,178},{0,8,9},{0,8,137},{0,8,73},{0,9,242},{16,7,4},
{0,8,85},{0,8,21},{16,8,258},{19,7,43},{0,8,117},{0,8,53},{0,9,202},
{17,7,13},{0,8,101},{0,8,37},{0,9,170},{0,8,5},{0,8,133},{0,8,69},
{0,9,234},{16,7,8},{0,8,93},{0,8,29},{0,9,154},{20,7,83},{0,8,125},
{0,8,61},{0,9,218},{18,7,23},{0,8,109},{0,8,45},{0,9,186},{0,8,13},
{0,8,141},{0,8,77},{0,9,250},{16,7,3},{0,8,83},{0,8,19},{21,8,195},
{19,7,35},{0,8,115},{0,8,51},{0,9,198},{17,7,11},{0,8,99},{0,8,35},
{0,9,166},{0,8,3},{0,8,131},{0,8,67},{0,9,230},{16,7,7},{0,8,91},
{0,8,27},{0,9,150},{20,7,67},{0,8,123},{0,8,59},{0,9,214},{18,7,19},
{0,8,107},{0,8,43},{0,9,182},{0,8,11},{0,8,139},{0,8,75},{0,9,246},
{16,7,5},{0,8,87},{0,8,23},{64,8,0},{19,7,51},{0,8,119},{0,8,55},
{0,9,206},{17,7,15},{0,8,103},{0,8,39},{0,9,174},{0,8,7},{0,8,135},
{0,8,71},{0,9,238},{16,7,9},{0,8,95},{0,8,31},{0,9,158},{20,7,99},
{0,8,127},{0,8,63},{0,9,222},{18,7,27},{0,8,111},{0,8,47},{0,9,190},
{0,8,15},{0,8,143},{0,8,79},{0,9,254},{96,7,0},{0,8,80},{0,8,16},
{20,8,115},{18,7,31},{0,8,112},{0,8,48},{0,9,193},{16,7,10},{0,8,96},
{0,8,32},{0,9,161},{0,8,0},{0,8,128},{0,8,64},{0,9,225},{16,7,6},
{0,8,88},{0,8,24},{0,9,145},{19,7,59},{0,8,120},{0,8,56},{0,9,209},
{17,7,17},{0,8,104},{0,8,40},{0,9,177},{0,8,8},{0,8,136},{0,8,72},
{0,9,241},{16,7,4},{0,8,84},{0,8,20},{21,8,227},{19,7,43},{0,8,116},
{0,8,52},{0,9,201},{17,7,13},{0,8,100},{0,8,36},{0,9,169},{0,8,4},
{0,8,132},{0,8,68},{0,9,233},{16,7,8},{0,8,92},{0,8,28},{0,9,153},
{20,7,83},{0,8,124},{0,8,60},{0,9,217},{18,7,23},{0,8,108},{0,8,44},
{0,9,185},{0,8,12},{0,8,140},{0,8,76},{0,9,249},{16,7,3},{0,8,82},
{0,8,18},{21,8,163},{19,7,35},{0,8,114},{0,8,50},{0,9,197},{17,7,11},
{0,8,98},{0,8,34},{0,9,165},{0,8,2},{0,8,130},{0,8,66},{0,9,229},
{16,7,7},{0,8,90},{0,8,26},{0,9,149},{20,7,67},{0,8,122},{0,8,58},
{0,9,213},{18,7,19},{0,8,106},{0,8,42},{0,9,181},{0,8,10},{0,8,138},
{0,8,74},{0,9,245},{16,7,5},{0,8,86},{0,8,22},{64,8,0},{19,7,51},
{0,8,118},{0,8,54},{0,9,205},{17,7,15},{0,8,102},{0,8,38},{0,9,173},
{0,8,6},{0,8,134},{0,8,70},{0,9,237},{16,7,9},{0,8,94},{0,8,30},
{0,9,157},{20,7,99},{0,8,126},{0,8,62},{0,9,221},{18,7,27},{0,8,110},
{0,8,46},{0,9,189},{0,8,14},{0,8,142},{0,8,78},{0,9,253},{96,7,0},
{0,8,81},{0,8,17},{21,8,131},{18,7,31},{0,8,113},{0,8,49},{0,9,195},
{16,7,10},{0,8,97},{0,8,33},{0,9,163},{0,8,1},{0,8,129},{0,8,65},
{0,9,227},{16,7,6},{0,8,89},{0,8,25},{0,9,147},{19,7,59},{0,8,121},
{0,8,57},{0,9,211},{17,7,17},{0,8,105},{0,8,41},{0,9,179},{0,8,9},
{0,8,137},{0,8,73},{0,9,243},{16,7,4},{0,8,85},{0,8,21},{16,8,258},
{19,7,43},{0,8,117},{0,8,53},{0,9,203},{17,7,13},{0,8,101},{0,8,37},
{0,9,171},{0,8,5},{0,8,133},{0,8,69},{0,9,235},{16,7,8},{0,8,93},
{0,8,29},{0,9,155},{20,7,83},{0,8,125},{0,8,61},{0,9,219},{18,7,23},
{0,8,109},{0,8,45},{0,9,187},{0,8,13},{0,8,141},{0,8,77},{0,9,251},
{16,7,3},{0,8,83},{0,8,19},{21,8,195},{19,7,35},{0,8,115},{0,8,51},
{0,9,199},{17,7,11},{0,8,99},{0,8,35},{0,9,167},{0,8,3},{0,8,131},
{0,8,67},{0,9,231},{16,7,7},{0,8,91},{0,8,27},{0,9,151},{20,7,67},
{0,8,123},{0,8,59},{0,9,215},{18,7,19},{0,8,107},{0,8,43},{0,9,183},
{0,8,11},{0,8,139},{0,8,75},{0,9,247},{16,7,5},{0,8,87},{0,8,23},
{64,8,0},{19,7,51},{0,8,119},{0,8,55},{0,9,207},{17,7,15},{0,8,103},
{0,8,39},{0,9,175},{0,8,7},{0,8,135},{0,8,71},{0,9,239},{16,7,9},
{0,8,95},{0,8,31},{0,9,159},{20,7,99},{0,8,127},{0,8,63},{0,9,223},
{18,7,27},{0,8,111},{0,8,47},{0,9,191},{0,8,15},{0,8,143},{0,8,79},
{0,9,255}
};
static const code distfix[32] = {
{16,5,1},{23,5,257},{19,5,17},{27,5,4097},{17,5,5},{25,5,1025},
{21,5,65},{29,5,16385},{16,5,3},{24,5,513},{20,5,33},{28,5,8193},
{18,5,9},{26,5,2049},{22,5,129},{64,5,0},{16,5,2},{23,5,385},
{19,5,25},{27,5,6145},{17,5,7},{25,5,1537},{21,5,97},{29,5,24577},
{16,5,4},{24,5,769},{20,5,49},{28,5,12289},{18,5,13},{26,5,3073},
{22,5,193},{64,5,0}
};

File diff suppressed because it is too large

View File

@@ -1,122 +0,0 @@
/* inflate.h -- internal inflate state definition
* Copyright (C) 1995-2009 Mark Adler
* For conditions of distribution and use, see copyright notice in zlib.h
*/
/* WARNING: this file should *not* be used by applications. It is
part of the implementation of the compression library and is
subject to change. Applications should only use zlib.h.
*/
/* define NO_GZIP when compiling if you want to disable gzip header and
trailer decoding by inflate(). NO_GZIP would be used to avoid linking in
the crc code when it is not needed. For shared libraries, gzip decoding
should be left enabled. */
#ifndef NO_GZIP
# define GUNZIP
#endif
/* Possible inflate modes between inflate() calls */
typedef enum {
HEAD, /* i: waiting for magic header */
FLAGS, /* i: waiting for method and flags (gzip) */
TIME, /* i: waiting for modification time (gzip) */
OS, /* i: waiting for extra flags and operating system (gzip) */
EXLEN, /* i: waiting for extra length (gzip) */
EXTRA, /* i: waiting for extra bytes (gzip) */
NAME, /* i: waiting for end of file name (gzip) */
COMMENT, /* i: waiting for end of comment (gzip) */
HCRC, /* i: waiting for header crc (gzip) */
DICTID, /* i: waiting for dictionary check value */
DICT, /* waiting for inflateSetDictionary() call */
TYPE, /* i: waiting for type bits, including last-flag bit */
TYPEDO, /* i: same, but skip check to exit inflate on new block */
STORED, /* i: waiting for stored size (length and complement) */
COPY_, /* i/o: same as COPY below, but only first time in */
COPY, /* i/o: waiting for input or output to copy stored block */
TABLE, /* i: waiting for dynamic block table lengths */
LENLENS, /* i: waiting for code length code lengths */
CODELENS, /* i: waiting for length/lit and distance code lengths */
LEN_, /* i: same as LEN below, but only first time in */
LEN, /* i: waiting for length/lit/eob code */
LENEXT, /* i: waiting for length extra bits */
DIST, /* i: waiting for distance code */
DISTEXT, /* i: waiting for distance extra bits */
MATCH, /* o: waiting for output space to copy string */
LIT, /* o: waiting for output space to write literal */
CHECK, /* i: waiting for 32-bit check value */
LENGTH, /* i: waiting for 32-bit length (gzip) */
DONE, /* finished check, done -- remain here until reset */
BAD, /* got a data error -- remain here until reset */
MEM, /* got an inflate() memory error -- remain here until reset */
SYNC /* looking for synchronization bytes to restart inflate() */
} inflate_mode;
/*
State transitions between above modes -
(most modes can go to BAD or MEM on error -- not shown for clarity)
Process header:
HEAD -> (gzip) or (zlib) or (raw)
(gzip) -> FLAGS -> TIME -> OS -> EXLEN -> EXTRA -> NAME -> COMMENT ->
HCRC -> TYPE
(zlib) -> DICTID or TYPE
DICTID -> DICT -> TYPE
(raw) -> TYPEDO
Read deflate blocks:
TYPE -> TYPEDO -> STORED or TABLE or LEN_ or CHECK
STORED -> COPY_ -> COPY -> TYPE
TABLE -> LENLENS -> CODELENS -> LEN_
LEN_ -> LEN
Read deflate codes in fixed or dynamic block:
LEN -> LENEXT or LIT or TYPE
LENEXT -> DIST -> DISTEXT -> MATCH -> LEN
LIT -> LEN
Process trailer:
CHECK -> LENGTH -> DONE
*/
/* state maintained between inflate() calls. Approximately 10K bytes. */
struct inflate_state {
inflate_mode mode; /* current inflate mode */
int last; /* true if processing last block */
int wrap; /* bit 0 true for zlib, bit 1 true for gzip */
int havedict; /* true if dictionary provided */
int flags; /* gzip header method and flags (0 if zlib) */
unsigned dmax; /* zlib header max distance (INFLATE_STRICT) */
unsigned long check; /* protected copy of check value */
unsigned long total; /* protected copy of output count */
gz_headerp head; /* where to save gzip header information */
/* sliding window */
unsigned wbits; /* log base 2 of requested window size */
unsigned wsize; /* window size or zero if not using window */
unsigned whave; /* valid bytes in the window */
unsigned wnext; /* window write index */
unsigned char FAR *window; /* allocated sliding window, if needed */
/* bit accumulator */
unsigned long hold; /* input bit accumulator */
unsigned bits; /* number of bits in "in" */
/* for string and stored block copying */
unsigned length; /* literal or length of data to copy */
unsigned offset; /* distance back to copy string from */
/* for table and code decoding */
unsigned extra; /* extra bits needed */
/* fixed and dynamic code tables */
code const FAR *lencode; /* starting table for length/literal codes */
code const FAR *distcode; /* starting table for distance codes */
unsigned lenbits; /* index bits for lencode */
unsigned distbits; /* index bits for distcode */
/* dynamic table building */
unsigned ncode; /* number of code length code lengths */
unsigned nlen; /* number of length code lengths */
unsigned ndist; /* number of distance code lengths */
unsigned have; /* number of code lengths in lens[] */
code FAR *next; /* next available space in codes[] */
unsigned short lens[320]; /* temporary storage for code lengths */
unsigned short work[288]; /* work area for code table building */
code codes[ENOUGH]; /* space for code tables */
int sane; /* if false, allow invalid distance too far */
int back; /* bits back of last unprocessed length/lit */
unsigned was; /* initial length of match */
};


@@ -1,306 +0,0 @@
/* inftrees.c -- generate Huffman trees for efficient decoding
* Copyright (C) 1995-2013 Mark Adler
* For conditions of distribution and use, see copyright notice in zlib.h
*/
#include "zutil.h"
#include "inftrees.h"
#define MAXBITS 15
const char inflate_copyright[] =
" inflate 1.2.8 Copyright 1995-2013 Mark Adler ";
/*
If you use the zlib library in a product, an acknowledgment is welcome
in the documentation of your product. If for some reason you cannot
include such an acknowledgment, I would appreciate that you keep this
copyright string in the executable of your product.
*/
/*
Build a set of tables to decode the provided canonical Huffman code.
The code lengths are lens[0..codes-1]. The result starts at *table,
whose indices are 0..2^bits-1. work is a writable array of at least
lens shorts, which is used as a work area. type is the type of code
to be generated, CODES, LENS, or DISTS. On return, zero is success,
-1 is an invalid code, and +1 means that ENOUGH isn't enough. table
on return points to the next available entry's address. bits is the
requested root table index bits, and on return it is the actual root
table index bits. It will differ if the request is greater than the
longest code or if it is less than the shortest code.
*/
int ZLIB_INTERNAL inflate_table(type, lens, codes, table, bits, work)
codetype type;
unsigned short FAR *lens;
unsigned codes;
code FAR * FAR *table;
unsigned FAR *bits;
unsigned short FAR *work;
{
unsigned len; /* a code's length in bits */
unsigned sym; /* index of code symbols */
unsigned min, max; /* minimum and maximum code lengths */
unsigned root; /* number of index bits for root table */
unsigned curr; /* number of index bits for current table */
unsigned drop; /* code bits to drop for sub-table */
int left; /* number of prefix codes available */
unsigned used; /* code entries in table used */
unsigned huff; /* Huffman code */
unsigned incr; /* for incrementing code, index */
unsigned fill; /* index for replicating entries */
unsigned low; /* low bits for current root entry */
unsigned mask; /* mask for low root bits */
code here; /* table entry for duplication */
code FAR *next; /* next available space in table */
const unsigned short FAR *base; /* base value table to use */
const unsigned short FAR *extra; /* extra bits table to use */
int end; /* use base and extra for symbol > end */
unsigned short count[MAXBITS+1]; /* number of codes of each length */
unsigned short offs[MAXBITS+1]; /* offsets in table for each length */
static const unsigned short lbase[31] = { /* Length codes 257..285 base */
3, 4, 5, 6, 7, 8, 9, 10, 11, 13, 15, 17, 19, 23, 27, 31,
35, 43, 51, 59, 67, 83, 99, 115, 131, 163, 195, 227, 258, 0, 0};
static const unsigned short lext[31] = { /* Length codes 257..285 extra */
16, 16, 16, 16, 16, 16, 16, 16, 17, 17, 17, 17, 18, 18, 18, 18,
19, 19, 19, 19, 20, 20, 20, 20, 21, 21, 21, 21, 16, 72, 78};
static const unsigned short dbase[32] = { /* Distance codes 0..29 base */
1, 2, 3, 4, 5, 7, 9, 13, 17, 25, 33, 49, 65, 97, 129, 193,
257, 385, 513, 769, 1025, 1537, 2049, 3073, 4097, 6145,
8193, 12289, 16385, 24577, 0, 0};
static const unsigned short dext[32] = { /* Distance codes 0..29 extra */
16, 16, 16, 16, 17, 17, 18, 18, 19, 19, 20, 20, 21, 21, 22, 22,
23, 23, 24, 24, 25, 25, 26, 26, 27, 27,
28, 28, 29, 29, 64, 64};
/*
Process a set of code lengths to create a canonical Huffman code. The
code lengths are lens[0..codes-1]. Each length corresponds to the
symbols 0..codes-1. The Huffman code is generated by first sorting the
symbols by length from short to long, and retaining the symbol order
for codes with equal lengths. Then the code starts with all zero bits
for the first code of the shortest length, and the codes are integer
increments for the same length, and zeros are appended as the length
increases. For the deflate format, these bits are stored backwards
from their more natural integer increment ordering, and so when the
decoding tables are built in the large loop below, the integer codes
are incremented backwards.
This routine assumes, but does not check, that all of the entries in
lens[] are in the range 0..MAXBITS. The caller must assure this.
1..MAXBITS is interpreted as that code length. zero means that that
symbol does not occur in this code.
The codes are sorted by computing a count of codes for each length,
creating from that a table of starting indices for each length in the
sorted table, and then entering the symbols in order in the sorted
table. The sorted table is work[], with that space being provided by
the caller.
The length counts are used for other purposes as well, i.e. finding
the minimum and maximum length codes, determining if there are any
codes at all, checking for a valid set of lengths, and looking ahead
at length counts to determine sub-table sizes when building the
decoding tables.
*/
/* accumulate lengths for codes (assumes lens[] all in 0..MAXBITS) */
for (len = 0; len <= MAXBITS; len++)
count[len] = 0;
for (sym = 0; sym < codes; sym++)
count[lens[sym]]++;
/* bound code lengths, force root to be within code lengths */
root = *bits;
for (max = MAXBITS; max >= 1; max--)
if (count[max] != 0) break;
if (root > max) root = max;
if (max == 0) { /* no symbols to code at all */
here.op = (unsigned char)64; /* invalid code marker */
here.bits = (unsigned char)1;
here.val = (unsigned short)0;
*(*table)++ = here; /* make a table to force an error */
*(*table)++ = here;
*bits = 1;
return 0; /* no symbols, but wait for decoding to report error */
}
for (min = 1; min < max; min++)
if (count[min] != 0) break;
if (root < min) root = min;
/* check for an over-subscribed or incomplete set of lengths */
left = 1;
for (len = 1; len <= MAXBITS; len++) {
left <<= 1;
left -= count[len];
if (left < 0) return -1; /* over-subscribed */
}
if (left > 0 && (type == CODES || max != 1))
return -1; /* incomplete set */
/* generate offsets into symbol table for each length for sorting */
offs[1] = 0;
for (len = 1; len < MAXBITS; len++)
offs[len + 1] = offs[len] + count[len];
/* sort symbols by length, by symbol order within each length */
for (sym = 0; sym < codes; sym++)
if (lens[sym] != 0) work[offs[lens[sym]]++] = (unsigned short)sym;
/*
Create and fill in decoding tables. In this loop, the table being
filled is at next and has curr index bits. The code being used is huff
with length len. That code is converted to an index by dropping drop
bits off of the bottom. For codes where len is less than drop + curr,
those top drop + curr - len bits are incremented through all values to
fill the table with replicated entries.
root is the number of index bits for the root table. When len exceeds
root, sub-tables are created pointed to by the root entry with an index
of the low root bits of huff. This is saved in low to check for when a
new sub-table should be started. drop is zero when the root table is
being filled, and drop is root when sub-tables are being filled.
When a new sub-table is needed, it is necessary to look ahead in the
code lengths to determine what size sub-table is needed. The length
counts are used for this, and so count[] is decremented as codes are
entered in the tables.
used keeps track of how many table entries have been allocated from the
provided *table space. It is checked for LENS and DIST tables against
the constants ENOUGH_LENS and ENOUGH_DISTS to guard against changes in
the initial root table size constants. See the comments in inftrees.h
for more information.
sym increments through all symbols, and the loop terminates when
all codes of length max, i.e. all codes, have been processed. This
routine permits incomplete codes, so another loop after this one fills
in the rest of the decoding tables with invalid code markers.
*/
/* set up for code type */
switch (type) {
case CODES:
base = extra = work; /* dummy value--not used */
end = 19;
break;
case LENS:
base = lbase;
base -= 257;
extra = lext;
extra -= 257;
end = 256;
break;
default: /* DISTS */
base = dbase;
extra = dext;
end = -1;
}
/* initialize state for loop */
huff = 0; /* starting code */
sym = 0; /* starting code symbol */
len = min; /* starting code length */
next = *table; /* current table to fill in */
curr = root; /* current table index bits */
drop = 0; /* current bits to drop from code for index */
low = (unsigned)(-1); /* trigger new sub-table when len > root */
used = 1U << root; /* use root table entries */
mask = used - 1; /* mask for comparing low */
/* check available table space */
if ((type == LENS && used > ENOUGH_LENS) ||
(type == DISTS && used > ENOUGH_DISTS))
return 1;
/* process all codes and make table entries */
for (;;) {
/* create table entry */
here.bits = (unsigned char)(len - drop);
if ((int)(work[sym]) < end) {
here.op = (unsigned char)0;
here.val = work[sym];
}
else if ((int)(work[sym]) > end) {
here.op = (unsigned char)(extra[work[sym]]);
here.val = base[work[sym]];
}
else {
here.op = (unsigned char)(32 + 64); /* end of block */
here.val = 0;
}
/* replicate for those indices with low len bits equal to huff */
incr = 1U << (len - drop);
fill = 1U << curr;
min = fill; /* save offset to next table */
do {
fill -= incr;
next[(huff >> drop) + fill] = here;
} while (fill != 0);
/* backwards increment the len-bit code huff */
incr = 1U << (len - 1);
while (huff & incr)
incr >>= 1;
if (incr != 0) {
huff &= incr - 1;
huff += incr;
}
else
huff = 0;
/* go to next symbol, update count, len */
sym++;
if (--(count[len]) == 0) {
if (len == max) break;
len = lens[work[sym]];
}
/* create new sub-table if needed */
if (len > root && (huff & mask) != low) {
/* if first time, transition to sub-tables */
if (drop == 0)
drop = root;
/* increment past last table */
next += min; /* here min is 1 << curr */
/* determine length of next table */
curr = len - drop;
left = (int)(1 << curr);
while (curr + drop < max) {
left -= count[curr + drop];
if (left <= 0) break;
curr++;
left <<= 1;
}
/* check for enough space */
used += 1U << curr;
if ((type == LENS && used > ENOUGH_LENS) ||
(type == DISTS && used > ENOUGH_DISTS))
return 1;
/* point entry in root table to sub-table */
low = huff & mask;
(*table)[low].op = (unsigned char)curr;
(*table)[low].bits = (unsigned char)root;
(*table)[low].val = (unsigned short)(next - *table);
}
}
/* fill in remaining table entry if code is incomplete (guaranteed to have
at most one remaining entry, since if the code is incomplete, the
maximum code length that was allowed to get this far is one bit) */
if (huff != 0) {
here.op = (unsigned char)64; /* invalid code marker */
here.bits = (unsigned char)(len - drop);
here.val = (unsigned short)0;
next[huff] = here;
}
/* set return parameters */
*table += used;
*bits = root;
return 0;
}
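The header comment above describes how the canonical Huffman code is assigned from the sorted code lengths. The following self-contained sketch (not zlib source) performs the same first-code computation in natural bit order, using the worked example from RFC 1951, section 3.2.2; zlib itself stores the bits reversed, which the table-filling loop handles by incrementing codes backwards.
/* Sketch: assign canonical codes from code lengths, as described above. */
#include <stdio.h>
#define MAXBITS 15
static void canonical_codes(const unsigned short *lens, unsigned n, unsigned *codes)
{
    unsigned short count[MAXBITS + 1] = {0};   /* number of codes of each length */
    unsigned next_code[MAXBITS + 1] = {0};     /* first code of each length */
    unsigned bits, sym, code = 0;
    for (sym = 0; sym < n; sym++)
        count[lens[sym]]++;
    count[0] = 0;                              /* length 0 means the symbol is unused */
    for (bits = 1; bits <= MAXBITS; bits++) {
        code = (code + count[bits - 1]) << 1;
        next_code[bits] = code;
    }
    for (sym = 0; sym < n; sym++)
        if (lens[sym] != 0)
            codes[sym] = next_code[lens[sym]]++;
}
int main(void)
{
    /* lengths for symbols A..H from RFC 1951, section 3.2.2 */
    static const unsigned short lens[8] = {3, 3, 3, 3, 3, 2, 4, 4};
    unsigned codes[8], i;
    canonical_codes(lens, 8, codes);
    for (i = 0; i < 8; i++)
        printf("symbol %u: length %u, code %u\n", i, (unsigned)lens[i], codes[i]);
    return 0;   /* expected codes: 2 3 4 5 6 0 14 15 */
}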


@@ -1,62 +0,0 @@
/* inftrees.h -- header to use inftrees.c
* Copyright (C) 1995-2005, 2010 Mark Adler
* For conditions of distribution and use, see copyright notice in zlib.h
*/
/* WARNING: this file should *not* be used by applications. It is
part of the implementation of the compression library and is
subject to change. Applications should only use zlib.h.
*/
/* Structure for decoding tables. Each entry provides either the
information needed to do the operation requested by the code that
indexed that table entry, or it provides a pointer to another
table that indexes more bits of the code. op indicates whether
the entry is a pointer to another table, a literal, a length or
distance, an end-of-block, or an invalid code. For a table
pointer, the low four bits of op is the number of index bits of
that table. For a length or distance, the low four bits of op
is the number of extra bits to get after the code. bits is
the number of bits in this code or part of the code to drop off
of the bit buffer. val is the actual byte to output in the case
of a literal, the base length or distance, or the offset from
the current table to the next table. Each entry is four bytes. */
typedef struct {
unsigned char op; /* operation, extra bits, table bits */
unsigned char bits; /* bits in this part of the code */
unsigned short val; /* offset in table or code value */
} code;
/* op values as set by inflate_table():
00000000 - literal
0000tttt - table link, tttt != 0 is the number of table index bits
0001eeee - length or distance, eeee is the number of extra bits
01100000 - end of block
01000000 - invalid code
*/
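As a concrete reading (a sketch, not zlib source): the fixed distance table entry {27,5,4097} from inffixed.h above has op = 27 = 0001 1011, i.e. a length/distance entry with 11 extra bits on top of base distance 4097, after dropping 5 code bits.
#include <stdio.h>
typedef struct {
    unsigned char op;     /* operation, extra bits, table bits */
    unsigned char bits;   /* bits in this part of the code */
    unsigned short val;   /* offset in table or code value */
} code;
int main(void)
{
    code here = {27, 5, 4097};                 /* distfix[3] from inffixed.h */
    if (here.op & 16)                          /* 0001eeee: length or distance base */
        printf("drop %u code bits, read %u extra bits, base %u\n",
               (unsigned)here.bits, (unsigned)(here.op & 15), (unsigned)here.val);
    return 0;
}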
/* Maximum size of the dynamic table. The maximum number of code structures is
1444, which is the sum of 852 for literal/length codes and 592 for distance
codes. These values were found by exhaustive searches using the program
examples/enough.c found in the zlib distribution. The arguments to that
program are the number of symbols, the initial root table size, and the
maximum bit length of a code. "enough 286 9 15" for literal/length codes
returns 852, and "enough 30 6 15" for distance codes returns 592.
The initial root table size (9 or 6) is found in the fifth argument of the
inflate_table() calls in inflate.c and infback.c. If the root table size is
changed, then these maximum sizes would need to be recalculated and
updated. */
#define ENOUGH_LENS 852
#define ENOUGH_DISTS 592
#define ENOUGH (ENOUGH_LENS+ENOUGH_DISTS)
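Worked out with the four-byte entry size noted above (a sketch): ENOUGH is 852 + 592 = 1444 entries, i.e. 5776 bytes for the codes[] array, a large share of the roughly 10K of per-stream state mentioned in inflate.h.
enum {
    ENOUGH_ENTRIES = 852 + 592,            /* ENOUGH_LENS + ENOUGH_DISTS = 1444 */
    ENOUGH_BYTES   = ENOUGH_ENTRIES * 4    /* four bytes per code entry = 5776 */
};
typedef char assert_enough_budget[ENOUGH_ENTRIES == 1444 && ENOUGH_BYTES == 5776 ? 1 : -1];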
/* Type of code to build for inflate_table() */
typedef enum {
CODES,
LENS,
DISTS
} codetype;
int ZLIB_INTERNAL inflate_table OF((codetype type, unsigned short FAR *lens,
unsigned codes, code FAR * FAR *table,
unsigned FAR *bits, unsigned short FAR *work));

File diff suppressed because it is too large


@@ -1,128 +0,0 @@
/* header created automatically with -DGEN_TREES_H */
local const ct_data static_ltree[L_CODES+2] = {
{{ 12},{ 8}}, {{140},{ 8}}, {{ 76},{ 8}}, {{204},{ 8}}, {{ 44},{ 8}},
{{172},{ 8}}, {{108},{ 8}}, {{236},{ 8}}, {{ 28},{ 8}}, {{156},{ 8}},
{{ 92},{ 8}}, {{220},{ 8}}, {{ 60},{ 8}}, {{188},{ 8}}, {{124},{ 8}},
{{252},{ 8}}, {{ 2},{ 8}}, {{130},{ 8}}, {{ 66},{ 8}}, {{194},{ 8}},
{{ 34},{ 8}}, {{162},{ 8}}, {{ 98},{ 8}}, {{226},{ 8}}, {{ 18},{ 8}},
{{146},{ 8}}, {{ 82},{ 8}}, {{210},{ 8}}, {{ 50},{ 8}}, {{178},{ 8}},
{{114},{ 8}}, {{242},{ 8}}, {{ 10},{ 8}}, {{138},{ 8}}, {{ 74},{ 8}},
{{202},{ 8}}, {{ 42},{ 8}}, {{170},{ 8}}, {{106},{ 8}}, {{234},{ 8}},
{{ 26},{ 8}}, {{154},{ 8}}, {{ 90},{ 8}}, {{218},{ 8}}, {{ 58},{ 8}},
{{186},{ 8}}, {{122},{ 8}}, {{250},{ 8}}, {{ 6},{ 8}}, {{134},{ 8}},
{{ 70},{ 8}}, {{198},{ 8}}, {{ 38},{ 8}}, {{166},{ 8}}, {{102},{ 8}},
{{230},{ 8}}, {{ 22},{ 8}}, {{150},{ 8}}, {{ 86},{ 8}}, {{214},{ 8}},
{{ 54},{ 8}}, {{182},{ 8}}, {{118},{ 8}}, {{246},{ 8}}, {{ 14},{ 8}},
{{142},{ 8}}, {{ 78},{ 8}}, {{206},{ 8}}, {{ 46},{ 8}}, {{174},{ 8}},
{{110},{ 8}}, {{238},{ 8}}, {{ 30},{ 8}}, {{158},{ 8}}, {{ 94},{ 8}},
{{222},{ 8}}, {{ 62},{ 8}}, {{190},{ 8}}, {{126},{ 8}}, {{254},{ 8}},
{{ 1},{ 8}}, {{129},{ 8}}, {{ 65},{ 8}}, {{193},{ 8}}, {{ 33},{ 8}},
{{161},{ 8}}, {{ 97},{ 8}}, {{225},{ 8}}, {{ 17},{ 8}}, {{145},{ 8}},
{{ 81},{ 8}}, {{209},{ 8}}, {{ 49},{ 8}}, {{177},{ 8}}, {{113},{ 8}},
{{241},{ 8}}, {{ 9},{ 8}}, {{137},{ 8}}, {{ 73},{ 8}}, {{201},{ 8}},
{{ 41},{ 8}}, {{169},{ 8}}, {{105},{ 8}}, {{233},{ 8}}, {{ 25},{ 8}},
{{153},{ 8}}, {{ 89},{ 8}}, {{217},{ 8}}, {{ 57},{ 8}}, {{185},{ 8}},
{{121},{ 8}}, {{249},{ 8}}, {{ 5},{ 8}}, {{133},{ 8}}, {{ 69},{ 8}},
{{197},{ 8}}, {{ 37},{ 8}}, {{165},{ 8}}, {{101},{ 8}}, {{229},{ 8}},
{{ 21},{ 8}}, {{149},{ 8}}, {{ 85},{ 8}}, {{213},{ 8}}, {{ 53},{ 8}},
{{181},{ 8}}, {{117},{ 8}}, {{245},{ 8}}, {{ 13},{ 8}}, {{141},{ 8}},
{{ 77},{ 8}}, {{205},{ 8}}, {{ 45},{ 8}}, {{173},{ 8}}, {{109},{ 8}},
{{237},{ 8}}, {{ 29},{ 8}}, {{157},{ 8}}, {{ 93},{ 8}}, {{221},{ 8}},
{{ 61},{ 8}}, {{189},{ 8}}, {{125},{ 8}}, {{253},{ 8}}, {{ 19},{ 9}},
{{275},{ 9}}, {{147},{ 9}}, {{403},{ 9}}, {{ 83},{ 9}}, {{339},{ 9}},
{{211},{ 9}}, {{467},{ 9}}, {{ 51},{ 9}}, {{307},{ 9}}, {{179},{ 9}},
{{435},{ 9}}, {{115},{ 9}}, {{371},{ 9}}, {{243},{ 9}}, {{499},{ 9}},
{{ 11},{ 9}}, {{267},{ 9}}, {{139},{ 9}}, {{395},{ 9}}, {{ 75},{ 9}},
{{331},{ 9}}, {{203},{ 9}}, {{459},{ 9}}, {{ 43},{ 9}}, {{299},{ 9}},
{{171},{ 9}}, {{427},{ 9}}, {{107},{ 9}}, {{363},{ 9}}, {{235},{ 9}},
{{491},{ 9}}, {{ 27},{ 9}}, {{283},{ 9}}, {{155},{ 9}}, {{411},{ 9}},
{{ 91},{ 9}}, {{347},{ 9}}, {{219},{ 9}}, {{475},{ 9}}, {{ 59},{ 9}},
{{315},{ 9}}, {{187},{ 9}}, {{443},{ 9}}, {{123},{ 9}}, {{379},{ 9}},
{{251},{ 9}}, {{507},{ 9}}, {{ 7},{ 9}}, {{263},{ 9}}, {{135},{ 9}},
{{391},{ 9}}, {{ 71},{ 9}}, {{327},{ 9}}, {{199},{ 9}}, {{455},{ 9}},
{{ 39},{ 9}}, {{295},{ 9}}, {{167},{ 9}}, {{423},{ 9}}, {{103},{ 9}},
{{359},{ 9}}, {{231},{ 9}}, {{487},{ 9}}, {{ 23},{ 9}}, {{279},{ 9}},
{{151},{ 9}}, {{407},{ 9}}, {{ 87},{ 9}}, {{343},{ 9}}, {{215},{ 9}},
{{471},{ 9}}, {{ 55},{ 9}}, {{311},{ 9}}, {{183},{ 9}}, {{439},{ 9}},
{{119},{ 9}}, {{375},{ 9}}, {{247},{ 9}}, {{503},{ 9}}, {{ 15},{ 9}},
{{271},{ 9}}, {{143},{ 9}}, {{399},{ 9}}, {{ 79},{ 9}}, {{335},{ 9}},
{{207},{ 9}}, {{463},{ 9}}, {{ 47},{ 9}}, {{303},{ 9}}, {{175},{ 9}},
{{431},{ 9}}, {{111},{ 9}}, {{367},{ 9}}, {{239},{ 9}}, {{495},{ 9}},
{{ 31},{ 9}}, {{287},{ 9}}, {{159},{ 9}}, {{415},{ 9}}, {{ 95},{ 9}},
{{351},{ 9}}, {{223},{ 9}}, {{479},{ 9}}, {{ 63},{ 9}}, {{319},{ 9}},
{{191},{ 9}}, {{447},{ 9}}, {{127},{ 9}}, {{383},{ 9}}, {{255},{ 9}},
{{511},{ 9}}, {{ 0},{ 7}}, {{ 64},{ 7}}, {{ 32},{ 7}}, {{ 96},{ 7}},
{{ 16},{ 7}}, {{ 80},{ 7}}, {{ 48},{ 7}}, {{112},{ 7}}, {{ 8},{ 7}},
{{ 72},{ 7}}, {{ 40},{ 7}}, {{104},{ 7}}, {{ 24},{ 7}}, {{ 88},{ 7}},
{{ 56},{ 7}}, {{120},{ 7}}, {{ 4},{ 7}}, {{ 68},{ 7}}, {{ 36},{ 7}},
{{100},{ 7}}, {{ 20},{ 7}}, {{ 84},{ 7}}, {{ 52},{ 7}}, {{116},{ 7}},
{{ 3},{ 8}}, {{131},{ 8}}, {{ 67},{ 8}}, {{195},{ 8}}, {{ 35},{ 8}},
{{163},{ 8}}, {{ 99},{ 8}}, {{227},{ 8}}
};
local const ct_data static_dtree[D_CODES] = {
{{ 0},{ 5}}, {{16},{ 5}}, {{ 8},{ 5}}, {{24},{ 5}}, {{ 4},{ 5}},
{{20},{ 5}}, {{12},{ 5}}, {{28},{ 5}}, {{ 2},{ 5}}, {{18},{ 5}},
{{10},{ 5}}, {{26},{ 5}}, {{ 6},{ 5}}, {{22},{ 5}}, {{14},{ 5}},
{{30},{ 5}}, {{ 1},{ 5}}, {{17},{ 5}}, {{ 9},{ 5}}, {{25},{ 5}},
{{ 5},{ 5}}, {{21},{ 5}}, {{13},{ 5}}, {{29},{ 5}}, {{ 3},{ 5}},
{{19},{ 5}}, {{11},{ 5}}, {{27},{ 5}}, {{ 7},{ 5}}, {{23},{ 5}}
};
const uch ZLIB_INTERNAL _dist_code[DIST_CODE_LEN] = {
0, 1, 2, 3, 4, 4, 5, 5, 6, 6, 6, 6, 7, 7, 7, 7, 8, 8, 8, 8,
8, 8, 8, 8, 9, 9, 9, 9, 9, 9, 9, 9, 10, 10, 10, 10, 10, 10, 10, 10,
10, 10, 10, 10, 10, 10, 10, 10, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11,
11, 11, 11, 11, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12,
12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 13, 13, 13, 13,
13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13,
13, 13, 13, 13, 13, 13, 13, 13, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14,
14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14,
14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14,
14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 15, 15, 15, 15, 15, 15, 15, 15,
15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15,
15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15,
15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 0, 0, 16, 17,
18, 18, 19, 19, 20, 20, 20, 20, 21, 21, 21, 21, 22, 22, 22, 22, 22, 22, 22, 22,
23, 23, 23, 23, 23, 23, 23, 23, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24,
24, 24, 24, 24, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25,
26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26,
26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 27, 27, 27, 27, 27, 27, 27, 27,
27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27,
27, 27, 27, 27, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28,
28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28,
28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28,
28, 28, 28, 28, 28, 28, 28, 28, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29,
29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29,
29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29,
29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29
};
const uch ZLIB_INTERNAL _length_code[MAX_MATCH-MIN_MATCH+1]= {
0, 1, 2, 3, 4, 5, 6, 7, 8, 8, 9, 9, 10, 10, 11, 11, 12, 12, 12, 12,
13, 13, 13, 13, 14, 14, 14, 14, 15, 15, 15, 15, 16, 16, 16, 16, 16, 16, 16, 16,
17, 17, 17, 17, 17, 17, 17, 17, 18, 18, 18, 18, 18, 18, 18, 18, 19, 19, 19, 19,
19, 19, 19, 19, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20,
21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 22, 22, 22, 22,
22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 23, 23, 23, 23, 23, 23, 23, 23,
23, 23, 23, 23, 23, 23, 23, 23, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24,
24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24,
25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25,
25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 26, 26, 26, 26, 26, 26, 26, 26,
26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26,
26, 26, 26, 26, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27,
27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 28
};
local const int base_length[LENGTH_CODES] = {
0, 1, 2, 3, 4, 5, 6, 7, 8, 10, 12, 14, 16, 20, 24, 28, 32, 40, 48, 56,
64, 80, 96, 112, 128, 160, 192, 224, 0
};
local const int base_dist[D_CODES] = {
0, 1, 2, 3, 4, 6, 8, 12, 16, 24,
32, 48, 64, 96, 128, 192, 256, 384, 512, 768,
1024, 1536, 2048, 3072, 4096, 6144, 8192, 12288, 16384, 24576
};


@@ -1,59 +0,0 @@
/* uncompr.c -- decompress a memory buffer
* Copyright (C) 1995-2003, 2010 Jean-loup Gailly.
* For conditions of distribution and use, see copyright notice in zlib.h
*/
/* @(#) $Id$ */
#define ZLIB_INTERNAL
#include "zlib.h"
/* ===========================================================================
Decompresses the source buffer into the destination buffer. sourceLen is
the byte length of the source buffer. Upon entry, destLen is the total
size of the destination buffer, which must be large enough to hold the
entire uncompressed data. (The size of the uncompressed data must have
been saved previously by the compressor and transmitted to the decompressor
by some mechanism outside the scope of this compression library.)
Upon exit, destLen is the actual size of the uncompressed data.
uncompress returns Z_OK if success, Z_MEM_ERROR if there was not
enough memory, Z_BUF_ERROR if there was not enough room in the output
buffer, or Z_DATA_ERROR if the input data was corrupted.
*/
int ZEXPORT uncompress (dest, destLen, source, sourceLen)
Bytef *dest;
uLongf *destLen;
const Bytef *source;
uLong sourceLen;
{
z_stream stream;
int err;
stream.next_in = (z_const Bytef *)source;
stream.avail_in = (uInt)sourceLen;
/* Check for source > 64K on 16-bit machine: */
if ((uLong)stream.avail_in != sourceLen) return Z_BUF_ERROR;
stream.next_out = dest;
stream.avail_out = (uInt)*destLen;
if ((uLong)stream.avail_out != *destLen) return Z_BUF_ERROR;
stream.zalloc = (alloc_func)0;
stream.zfree = (free_func)0;
err = inflateInit(&stream);
if (err != Z_OK) return err;
err = inflate(&stream, Z_FINISH);
if (err != Z_STREAM_END) {
inflateEnd(&stream);
if (err == Z_NEED_DICT || (err == Z_BUF_ERROR && stream.avail_in == 0))
return Z_DATA_ERROR;
return err;
}
*destLen = stream.total_out;
err = inflateEnd(&stream);
return err;
}
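As the comment above says, the caller must already know (an upper bound on) the uncompressed size. A minimal round-trip sketch (not part of the deleted sources; buffer sizes are illustrative) using compress() and uncompress():
#include <stdio.h>
#include <string.h>
#include "zlib.h"
int main(void)
{
    const char msg[] = "hello, hello, hello, hello";
    Bytef packed[128], unpacked[128];
    uLongf packedLen = sizeof(packed);     /* in: room available, out: bytes written */
    uLongf unpackedLen = sizeof(unpacked); /* must be at least the original size */
    if (compress(packed, &packedLen, (const Bytef *)msg, sizeof(msg)) != Z_OK)
        return 1;
    if (uncompress(unpacked, &unpackedLen, packed, packedLen) != Z_OK)
        return 1;
    printf("%lu -> %lu -> %lu bytes\n", (unsigned long)sizeof(msg),
           (unsigned long)packedLen, (unsigned long)unpackedLen);
    return memcmp(msg, unpacked, sizeof(msg)) == 0 ? 0 : 1;
}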


@@ -1,511 +0,0 @@
/* zconf.h -- configuration of the zlib compression library
* Copyright (C) 1995-2013 Jean-loup Gailly.
* For conditions of distribution and use, see copyright notice in zlib.h
*/
/* @(#) $Id$ */
#ifndef ZCONF_H
#define ZCONF_H
/*
* If you *really* need a unique prefix for all types and library functions,
* compile with -DZ_PREFIX. The "standard" zlib should be compiled without it.
* Even better than compiling with -DZ_PREFIX would be to use configure to set
* this permanently in zconf.h using "./configure --zprefix".
*/
#ifdef Z_PREFIX /* may be set to #if 1 by ./configure */
# define Z_PREFIX_SET
/* all linked symbols */
# define _dist_code z__dist_code
# define _length_code z__length_code
# define _tr_align z__tr_align
# define _tr_flush_bits z__tr_flush_bits
# define _tr_flush_block z__tr_flush_block
# define _tr_init z__tr_init
# define _tr_stored_block z__tr_stored_block
# define _tr_tally z__tr_tally
# define adler32 z_adler32
# define adler32_combine z_adler32_combine
# define adler32_combine64 z_adler32_combine64
# ifndef Z_SOLO
# define compress z_compress
# define compress2 z_compress2
# define compressBound z_compressBound
# endif
# define crc32 z_crc32
# define crc32_combine z_crc32_combine
# define crc32_combine64 z_crc32_combine64
# define deflate z_deflate
# define deflateBound z_deflateBound
# define deflateCopy z_deflateCopy
# define deflateEnd z_deflateEnd
# define deflateInit2_ z_deflateInit2_
# define deflateInit_ z_deflateInit_
# define deflateParams z_deflateParams
# define deflatePending z_deflatePending
# define deflatePrime z_deflatePrime
# define deflateReset z_deflateReset
# define deflateResetKeep z_deflateResetKeep
# define deflateSetDictionary z_deflateSetDictionary
# define deflateSetHeader z_deflateSetHeader
# define deflateTune z_deflateTune
# define deflate_copyright z_deflate_copyright
# define get_crc_table z_get_crc_table
# ifndef Z_SOLO
# define gz_error z_gz_error
# define gz_intmax z_gz_intmax
# define gz_strwinerror z_gz_strwinerror
# define gzbuffer z_gzbuffer
# define gzclearerr z_gzclearerr
# define gzclose z_gzclose
# define gzclose_r z_gzclose_r
# define gzclose_w z_gzclose_w
# define gzdirect z_gzdirect
# define gzdopen z_gzdopen
# define gzeof z_gzeof
# define gzerror z_gzerror
# define gzflush z_gzflush
# define gzgetc z_gzgetc
# define gzgetc_ z_gzgetc_
# define gzgets z_gzgets
# define gzoffset z_gzoffset
# define gzoffset64 z_gzoffset64
# define gzopen z_gzopen
# define gzopen64 z_gzopen64
# ifdef _WIN32
# define gzopen_w z_gzopen_w
# endif
# define gzprintf z_gzprintf
# define gzvprintf z_gzvprintf
# define gzputc z_gzputc
# define gzputs z_gzputs
# define gzread z_gzread
# define gzrewind z_gzrewind
# define gzseek z_gzseek
# define gzseek64 z_gzseek64
# define gzsetparams z_gzsetparams
# define gztell z_gztell
# define gztell64 z_gztell64
# define gzungetc z_gzungetc
# define gzwrite z_gzwrite
# endif
# define inflate z_inflate
# define inflateBack z_inflateBack
# define inflateBackEnd z_inflateBackEnd
# define inflateBackInit_ z_inflateBackInit_
# define inflateCopy z_inflateCopy
# define inflateEnd z_inflateEnd
# define inflateGetHeader z_inflateGetHeader
# define inflateInit2_ z_inflateInit2_
# define inflateInit_ z_inflateInit_
# define inflateMark z_inflateMark
# define inflatePrime z_inflatePrime
# define inflateReset z_inflateReset
# define inflateReset2 z_inflateReset2
# define inflateSetDictionary z_inflateSetDictionary
# define inflateGetDictionary z_inflateGetDictionary
# define inflateSync z_inflateSync
# define inflateSyncPoint z_inflateSyncPoint
# define inflateUndermine z_inflateUndermine
# define inflateResetKeep z_inflateResetKeep
# define inflate_copyright z_inflate_copyright
# define inflate_fast z_inflate_fast
# define inflate_table z_inflate_table
# ifndef Z_SOLO
# define uncompress z_uncompress
# endif
# define zError z_zError
# ifndef Z_SOLO
# define zcalloc z_zcalloc
# define zcfree z_zcfree
# endif
# define zlibCompileFlags z_zlibCompileFlags
# define zlibVersion z_zlibVersion
/* all zlib typedefs in zlib.h and zconf.h */
# define Byte z_Byte
# define Bytef z_Bytef
# define alloc_func z_alloc_func
# define charf z_charf
# define free_func z_free_func
# ifndef Z_SOLO
# define gzFile z_gzFile
# endif
# define gz_header z_gz_header
# define gz_headerp z_gz_headerp
# define in_func z_in_func
# define intf z_intf
# define out_func z_out_func
# define uInt z_uInt
# define uIntf z_uIntf
# define uLong z_uLong
# define uLongf z_uLongf
# define voidp z_voidp
# define voidpc z_voidpc
# define voidpf z_voidpf
/* all zlib structs in zlib.h and zconf.h */
# define gz_header_s z_gz_header_s
# define internal_state z_internal_state
#endif
#if defined(__MSDOS__) && !defined(MSDOS)
# define MSDOS
#endif
#if (defined(OS_2) || defined(__OS2__)) && !defined(OS2)
# define OS2
#endif
#if defined(_WINDOWS) && !defined(WINDOWS)
# define WINDOWS
#endif
#if defined(_WIN32) || defined(_WIN32_WCE) || defined(__WIN32__)
# ifndef WIN32
# define WIN32
# endif
#endif
#if (defined(MSDOS) || defined(OS2) || defined(WINDOWS)) && !defined(WIN32)
# if !defined(__GNUC__) && !defined(__FLAT__) && !defined(__386__)
# ifndef SYS16BIT
# define SYS16BIT
# endif
# endif
#endif
/*
* Compile with -DMAXSEG_64K if the alloc function cannot allocate more
* than 64k bytes at a time (needed on systems with 16-bit int).
*/
#ifdef SYS16BIT
# define MAXSEG_64K
#endif
#ifdef MSDOS
# define UNALIGNED_OK
#endif
#ifdef __STDC_VERSION__
# ifndef STDC
# define STDC
# endif
# if __STDC_VERSION__ >= 199901L
# ifndef STDC99
# define STDC99
# endif
# endif
#endif
#if !defined(STDC) && (defined(__STDC__) || defined(__cplusplus))
# define STDC
#endif
#if !defined(STDC) && (defined(__GNUC__) || defined(__BORLANDC__))
# define STDC
#endif
#if !defined(STDC) && (defined(MSDOS) || defined(WINDOWS) || defined(WIN32))
# define STDC
#endif
#if !defined(STDC) && (defined(OS2) || defined(__HOS_AIX__))
# define STDC
#endif
#if defined(__OS400__) && !defined(STDC) /* iSeries (formerly AS/400). */
# define STDC
#endif
#ifndef STDC
# ifndef const /* cannot use !defined(STDC) && !defined(const) on Mac */
# define const /* note: need a more gentle solution here */
# endif
#endif
#if defined(ZLIB_CONST) && !defined(z_const)
# define z_const const
#else
# define z_const
#endif
/* Some Mac compilers merge all .h files incorrectly: */
#if defined(__MWERKS__)||defined(applec)||defined(THINK_C)||defined(__SC__)
# define NO_DUMMY_DECL
#endif
/* Maximum value for memLevel in deflateInit2 */
#ifndef MAX_MEM_LEVEL
# ifdef MAXSEG_64K
# define MAX_MEM_LEVEL 8
# else
# define MAX_MEM_LEVEL 9
# endif
#endif
/* Maximum value for windowBits in deflateInit2 and inflateInit2.
* WARNING: reducing MAX_WBITS makes minigzip unable to extract .gz files
* created by gzip. (Files created by minigzip can still be extracted by
* gzip.)
*/
#ifndef MAX_WBITS
# define MAX_WBITS 15 /* 32K LZ77 window */
#endif
/* The memory requirements for deflate are (in bytes):
(1 << (windowBits+2)) + (1 << (memLevel+9))
that is: 128K for windowBits=15 + 128K for memLevel = 8 (default values)
plus a few kilobytes for small objects. For example, if you want to reduce
the default memory requirements from 256K to 128K, compile with
make CFLAGS="-O -DMAX_WBITS=14 -DMAX_MEM_LEVEL=7"
Of course this will generally degrade compression (there's no free lunch).
The memory requirements for inflate are (in bytes) 1 << windowBits
that is, 32K for windowBits=15 (default value) plus a few kilobytes
for small objects.
*/
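Evaluated for the default values quoted above (a sketch; the formulas are taken from the comment):
#include <stdio.h>
int main(void)
{
    int windowBits = 15, memLevel = 8;
    unsigned long deflate_bytes = (1UL << (windowBits + 2)) + (1UL << (memLevel + 9));
    unsigned long inflate_bytes = 1UL << windowBits;
    printf("deflate: %lu bytes (128K + 128K), inflate: %lu bytes (32K)\n",
           deflate_bytes, inflate_bytes);
    return 0;
}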
/* Type declarations */
#ifndef OF /* function prototypes */
# ifdef STDC
# define OF(args) args
# else
# define OF(args) ()
# endif
#endif
#ifndef Z_ARG /* function prototypes for stdarg */
# if defined(STDC) || defined(Z_HAVE_STDARG_H)
# define Z_ARG(args) args
# else
# define Z_ARG(args) ()
# endif
#endif
/* The following definitions for FAR are needed only for MSDOS mixed
* model programming (small or medium model with some far allocations).
* This was tested only with MSC; for other MSDOS compilers you may have
* to define NO_MEMCPY in zutil.h. If you don't need the mixed model,
* just define FAR to be empty.
*/
#ifdef SYS16BIT
# if defined(M_I86SM) || defined(M_I86MM)
/* MSC small or medium model */
# define SMALL_MEDIUM
# ifdef _MSC_VER
# define FAR _far
# else
# define FAR far
# endif
# endif
# if (defined(__SMALL__) || defined(__MEDIUM__))
/* Turbo C small or medium model */
# define SMALL_MEDIUM
# ifdef __BORLANDC__
# define FAR _far
# else
# define FAR far
# endif
# endif
#endif
#if defined(WINDOWS) || defined(WIN32)
/* If building or using zlib as a DLL, define ZLIB_DLL.
* This is not mandatory, but it offers a little performance increase.
*/
# ifdef ZLIB_DLL
# if defined(WIN32) && (!defined(__BORLANDC__) || (__BORLANDC__ >= 0x500))
# ifdef ZLIB_INTERNAL
# define ZEXTERN extern __declspec(dllexport)
# else
# define ZEXTERN extern __declspec(dllimport)
# endif
# endif
# endif /* ZLIB_DLL */
/* If building or using zlib with the WINAPI/WINAPIV calling convention,
* define ZLIB_WINAPI.
* Caution: the standard ZLIB1.DLL is NOT compiled using ZLIB_WINAPI.
*/
# ifdef ZLIB_WINAPI
# ifdef FAR
# undef FAR
# endif
# include <windows.h>
/* No need for _export, use ZLIB.DEF instead. */
/* For complete Windows compatibility, use WINAPI, not __stdcall. */
# define ZEXPORT WINAPI
# ifdef WIN32
# define ZEXPORTVA WINAPIV
# else
# define ZEXPORTVA FAR CDECL
# endif
# endif
#endif
#if defined (__BEOS__)
# ifdef ZLIB_DLL
# ifdef ZLIB_INTERNAL
# define ZEXPORT __declspec(dllexport)
# define ZEXPORTVA __declspec(dllexport)
# else
# define ZEXPORT __declspec(dllimport)
# define ZEXPORTVA __declspec(dllimport)
# endif
# endif
#endif
#ifndef ZEXTERN
# define ZEXTERN extern
#endif
#ifndef ZEXPORT
# define ZEXPORT
#endif
#ifndef ZEXPORTVA
# define ZEXPORTVA
#endif
#ifndef FAR
# define FAR
#endif
#if !defined(__MACTYPES__)
typedef unsigned char Byte; /* 8 bits */
#endif
typedef unsigned int uInt; /* 16 bits or more */
typedef unsigned long uLong; /* 32 bits or more */
#ifdef SMALL_MEDIUM
/* Borland C/C++ and some old MSC versions ignore FAR inside typedef */
# define Bytef Byte FAR
#else
typedef Byte FAR Bytef;
#endif
typedef char FAR charf;
typedef int FAR intf;
typedef uInt FAR uIntf;
typedef uLong FAR uLongf;
#ifdef STDC
typedef void const *voidpc;
typedef void FAR *voidpf;
typedef void *voidp;
#else
typedef Byte const *voidpc;
typedef Byte FAR *voidpf;
typedef Byte *voidp;
#endif
#if !defined(Z_U4) && !defined(Z_SOLO) && defined(STDC)
# include <limits.h>
# if (UINT_MAX == 0xffffffffUL)
# define Z_U4 unsigned
# elif (ULONG_MAX == 0xffffffffUL)
# define Z_U4 unsigned long
# elif (USHRT_MAX == 0xffffffffUL)
# define Z_U4 unsigned short
# endif
#endif
#ifdef Z_U4
typedef Z_U4 z_crc_t;
#else
typedef unsigned long z_crc_t;
#endif
#ifdef HAVE_UNISTD_H /* may be set to #if 1 by ./configure */
# define Z_HAVE_UNISTD_H
#endif
#ifdef HAVE_STDARG_H /* may be set to #if 1 by ./configure */
# define Z_HAVE_STDARG_H
#endif
#ifdef STDC
# ifndef Z_SOLO
# include <sys/types.h> /* for off_t */
# endif
#endif
#if defined(STDC) || defined(Z_HAVE_STDARG_H)
# ifndef Z_SOLO
# include <stdarg.h> /* for va_list */
# endif
#endif
#ifdef _WIN32
# ifndef Z_SOLO
# include <stddef.h> /* for wchar_t */
# endif
#endif
/* a little trick to accommodate both "#define _LARGEFILE64_SOURCE" and
* "#define _LARGEFILE64_SOURCE 1" as requesting 64-bit operations, (even
* though the former does not conform to the LFS document), but considering
* both "#undef _LARGEFILE64_SOURCE" and "#define _LARGEFILE64_SOURCE 0" as
* equivalently requesting no 64-bit operations
*/
#if defined(_LARGEFILE64_SOURCE) && -_LARGEFILE64_SOURCE - -1 == 1
# undef _LARGEFILE64_SOURCE
#endif
#if defined(__WATCOMC__) && !defined(Z_HAVE_UNISTD_H)
# define Z_HAVE_UNISTD_H
#endif
#ifndef Z_SOLO
# if defined(Z_HAVE_UNISTD_H) || defined(_LARGEFILE64_SOURCE)
# include <unistd.h> /* for SEEK_*, off_t, and _LFS64_LARGEFILE */
# ifdef VMS
# include <unixio.h> /* for off_t */
# endif
# ifndef z_off_t
# define z_off_t off_t
# endif
# endif
#endif
#if defined(_LFS64_LARGEFILE) && _LFS64_LARGEFILE-0
# define Z_LFS64
#endif
#if defined(_LARGEFILE64_SOURCE) && defined(Z_LFS64)
# define Z_LARGE64
#endif
#if defined(_FILE_OFFSET_BITS) && _FILE_OFFSET_BITS-0 == 64 && defined(Z_LFS64)
# define Z_WANT64
#endif
#if !defined(SEEK_SET) && !defined(Z_SOLO)
# define SEEK_SET 0 /* Seek from beginning of file. */
# define SEEK_CUR 1 /* Seek from current position. */
# define SEEK_END 2 /* Set file pointer to EOF plus "offset" */
#endif
#ifndef z_off_t
# define z_off_t long
#endif
#if !defined(_WIN32) && defined(Z_LARGE64)
# define z_off64_t off64_t
#else
# if defined(_WIN32) && !defined(__GNUC__) && !defined(Z_SOLO)
# define z_off64_t __int64
# else
# define z_off64_t z_off_t
# endif
#endif
/* MVS linker does not support external names larger than 8 bytes */
#if defined(__MVS__)
#pragma map(deflateInit_,"DEIN")
#pragma map(deflateInit2_,"DEIN2")
#pragma map(deflateEnd,"DEEND")
#pragma map(deflateBound,"DEBND")
#pragma map(inflateInit_,"ININ")
#pragma map(inflateInit2_,"ININ2")
#pragma map(inflateEnd,"INEND")
#pragma map(inflateSync,"INSY")
#pragma map(inflateSetDictionary,"INSEDI")
#pragma map(compressBound,"CMBND")
#pragma map(inflate_table,"INTABL")
#pragma map(inflate_fast,"INFA")
#pragma map(inflate_copyright,"INCOPY")
#endif
#endif /* ZCONF_H */
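The memory-requirements note above maps directly onto the tuning knobs exposed by the Haskell binding whose sources appear later in this diff. A minimal sketch, assuming the `compressWith`/`defaultCompressParams` API of `Codec.Compression.Zlib`; the arithmetic in the comment just instantiates the formula quoted in the header:

```haskell
import qualified Data.ByteString.Lazy as BL
import Codec.Compression.Zlib
                ( compressWith, defaultCompressParams, CompressParams(..)
                , windowBits, memoryLevel )

-- Compress with a smaller LZ77 window and hash table: roughly
-- (1 << (14+2)) + (1 << (7+9)) = 64K + 64K of deflate state,
-- instead of the default 128K + 128K described above.
compressSmall :: BL.ByteString -> BL.ByteString
compressSmall = compressWith defaultCompressParams
  { compressWindowBits  = windowBits 14
  , compressMemoryLevel = memoryLevel 7
  }
```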

File diff suppressed because it is too large

View File

@@ -1,324 +0,0 @@
/* zutil.c -- target dependent utility functions for the compression library
* Copyright (C) 1995-2005, 2010, 2011, 2012 Jean-loup Gailly.
* For conditions of distribution and use, see copyright notice in zlib.h
*/
/* @(#) $Id$ */
#include "zutil.h"
#ifndef Z_SOLO
# include "gzguts.h"
#endif
#ifndef NO_DUMMY_DECL
struct internal_state {int dummy;}; /* for buggy compilers */
#endif
z_const char * const z_errmsg[10] = {
"need dictionary", /* Z_NEED_DICT 2 */
"stream end", /* Z_STREAM_END 1 */
"", /* Z_OK 0 */
"file error", /* Z_ERRNO (-1) */
"stream error", /* Z_STREAM_ERROR (-2) */
"data error", /* Z_DATA_ERROR (-3) */
"insufficient memory", /* Z_MEM_ERROR (-4) */
"buffer error", /* Z_BUF_ERROR (-5) */
"incompatible version",/* Z_VERSION_ERROR (-6) */
""};
const char * ZEXPORT zlibVersion()
{
return ZLIB_VERSION;
}
uLong ZEXPORT zlibCompileFlags()
{
uLong flags;
flags = 0;
switch ((int)(sizeof(uInt))) {
case 2: break;
case 4: flags += 1; break;
case 8: flags += 2; break;
default: flags += 3;
}
switch ((int)(sizeof(uLong))) {
case 2: break;
case 4: flags += 1 << 2; break;
case 8: flags += 2 << 2; break;
default: flags += 3 << 2;
}
switch ((int)(sizeof(voidpf))) {
case 2: break;
case 4: flags += 1 << 4; break;
case 8: flags += 2 << 4; break;
default: flags += 3 << 4;
}
switch ((int)(sizeof(z_off_t))) {
case 2: break;
case 4: flags += 1 << 6; break;
case 8: flags += 2 << 6; break;
default: flags += 3 << 6;
}
#ifdef DEBUG
flags += 1 << 8;
#endif
#if defined(ASMV) || defined(ASMINF)
flags += 1 << 9;
#endif
#ifdef ZLIB_WINAPI
flags += 1 << 10;
#endif
#ifdef BUILDFIXED
flags += 1 << 12;
#endif
#ifdef DYNAMIC_CRC_TABLE
flags += 1 << 13;
#endif
#ifdef NO_GZCOMPRESS
flags += 1L << 16;
#endif
#ifdef NO_GZIP
flags += 1L << 17;
#endif
#ifdef PKZIP_BUG_WORKAROUND
flags += 1L << 20;
#endif
#ifdef FASTEST
flags += 1L << 21;
#endif
#if defined(STDC) || defined(Z_HAVE_STDARG_H)
# ifdef NO_vsnprintf
flags += 1L << 25;
# ifdef HAS_vsprintf_void
flags += 1L << 26;
# endif
# else
# ifdef HAS_vsnprintf_void
flags += 1L << 26;
# endif
# endif
#else
flags += 1L << 24;
# ifdef NO_snprintf
flags += 1L << 25;
# ifdef HAS_sprintf_void
flags += 1L << 26;
# endif
# else
# ifdef HAS_snprintf_void
flags += 1L << 26;
# endif
# endif
#endif
return flags;
}
#ifdef DEBUG
# ifndef verbose
# define verbose 0
# endif
int ZLIB_INTERNAL z_verbose = verbose;
void ZLIB_INTERNAL z_error (m)
char *m;
{
fprintf(stderr, "%s\n", m);
exit(1);
}
#endif
/* exported to allow conversion of error code to string for compress() and
* uncompress()
*/
const char * ZEXPORT zError(err)
int err;
{
return ERR_MSG(err);
}
#if defined(_WIN32_WCE)
/* The Microsoft C Run-Time Library for Windows CE doesn't have
* errno. We define it as a global variable to simplify porting.
* Its value is always 0 and should not be used.
*/
int errno = 0;
#endif
#ifndef HAVE_MEMCPY
void ZLIB_INTERNAL zmemcpy(dest, source, len)
Bytef* dest;
const Bytef* source;
uInt len;
{
if (len == 0) return;
do {
*dest++ = *source++; /* ??? to be unrolled */
} while (--len != 0);
}
int ZLIB_INTERNAL zmemcmp(s1, s2, len)
const Bytef* s1;
const Bytef* s2;
uInt len;
{
uInt j;
for (j = 0; j < len; j++) {
if (s1[j] != s2[j]) return 2*(s1[j] > s2[j])-1;
}
return 0;
}
void ZLIB_INTERNAL zmemzero(dest, len)
Bytef* dest;
uInt len;
{
if (len == 0) return;
do {
*dest++ = 0; /* ??? to be unrolled */
} while (--len != 0);
}
#endif
#ifndef Z_SOLO
#ifdef SYS16BIT
#ifdef __TURBOC__
/* Turbo C in 16-bit mode */
# define MY_ZCALLOC
/* Turbo C malloc() does not allow dynamic allocation of 64K bytes
* and farmalloc(64K) returns a pointer with an offset of 8, so we
* must fix the pointer. Warning: the pointer must be put back to its
* original form in order to free it, use zcfree().
*/
#define MAX_PTR 10
/* 10*64K = 640K */
local int next_ptr = 0;
typedef struct ptr_table_s {
voidpf org_ptr;
voidpf new_ptr;
} ptr_table;
local ptr_table table[MAX_PTR];
/* This table is used to remember the original form of pointers
* to large buffers (64K). Such pointers are normalized with a zero offset.
* Since MSDOS is not a preemptive multitasking OS, this table is not
* protected from concurrent access. This hack doesn't work anyway on
* a protected system like OS/2. Use Microsoft C instead.
*/
voidpf ZLIB_INTERNAL zcalloc (voidpf opaque, unsigned items, unsigned size)
{
voidpf buf = opaque; /* just to make some compilers happy */
ulg bsize = (ulg)items*size;
/* If we allocate less than 65520 bytes, we assume that farmalloc
* will return a usable pointer which doesn't have to be normalized.
*/
if (bsize < 65520L) {
buf = farmalloc(bsize);
if (*(ush*)&buf != 0) return buf;
} else {
buf = farmalloc(bsize + 16L);
}
if (buf == NULL || next_ptr >= MAX_PTR) return NULL;
table[next_ptr].org_ptr = buf;
/* Normalize the pointer to seg:0 */
*((ush*)&buf+1) += ((ush)((uch*)buf-0) + 15) >> 4;
*(ush*)&buf = 0;
table[next_ptr++].new_ptr = buf;
return buf;
}
void ZLIB_INTERNAL zcfree (voidpf opaque, voidpf ptr)
{
int n;
if (*(ush*)&ptr != 0) { /* object < 64K */
farfree(ptr);
return;
}
/* Find the original pointer */
for (n = 0; n < next_ptr; n++) {
if (ptr != table[n].new_ptr) continue;
farfree(table[n].org_ptr);
while (++n < next_ptr) {
table[n-1] = table[n];
}
next_ptr--;
return;
}
ptr = opaque; /* just to make some compilers happy */
Assert(0, "zcfree: ptr not found");
}
#endif /* __TURBOC__ */
#ifdef M_I86
/* Microsoft C in 16-bit mode */
# define MY_ZCALLOC
#if (!defined(_MSC_VER) || (_MSC_VER <= 600))
# define _halloc halloc
# define _hfree hfree
#endif
voidpf ZLIB_INTERNAL zcalloc (voidpf opaque, uInt items, uInt size)
{
if (opaque) opaque = 0; /* to make compiler happy */
return _halloc((long)items, size);
}
void ZLIB_INTERNAL zcfree (voidpf opaque, voidpf ptr)
{
if (opaque) opaque = 0; /* to make compiler happy */
_hfree(ptr);
}
#endif /* M_I86 */
#endif /* SYS16BIT */
#ifndef MY_ZCALLOC /* Any system without a special alloc function */
#ifndef STDC
extern voidp malloc OF((uInt size));
extern voidp calloc OF((uInt items, uInt size));
extern void free OF((voidpf ptr));
#endif
voidpf ZLIB_INTERNAL zcalloc (opaque, items, size)
voidpf opaque;
unsigned items;
unsigned size;
{
if (opaque) items += size - size; /* make compiler happy */
return sizeof(uInt) > 2 ? (voidpf)malloc(items * size) :
(voidpf)calloc(items, size);
}
void ZLIB_INTERNAL zcfree (opaque, ptr)
voidpf opaque;
voidpf ptr;
{
free(ptr);
if (opaque) return; /* make compiler happy */
}
#endif /* MY_ZCALLOC */
#endif /* !Z_SOLO */

View File

@@ -1,253 +0,0 @@
/* zutil.h -- internal interface and configuration of the compression library
* Copyright (C) 1995-2013 Jean-loup Gailly.
* For conditions of distribution and use, see copyright notice in zlib.h
*/
/* WARNING: this file should *not* be used by applications. It is
part of the implementation of the compression library and is
subject to change. Applications should only use zlib.h.
*/
/* @(#) $Id$ */
#ifndef ZUTIL_H
#define ZUTIL_H
#ifdef HAVE_HIDDEN
# define ZLIB_INTERNAL __attribute__((visibility ("hidden")))
#else
# define ZLIB_INTERNAL
#endif
#include "zlib.h"
#if defined(STDC) && !defined(Z_SOLO)
# if !(defined(_WIN32_WCE) && defined(_MSC_VER))
# include <stddef.h>
# endif
# include <string.h>
# include <stdlib.h>
#endif
#ifdef Z_SOLO
typedef long ptrdiff_t; /* guess -- will be caught if guess is wrong */
#endif
#ifndef local
# define local static
#endif
/* compile with -Dlocal if your debugger can't find static symbols */
typedef unsigned char uch;
typedef uch FAR uchf;
typedef unsigned short ush;
typedef ush FAR ushf;
typedef unsigned long ulg;
extern z_const char * const z_errmsg[10]; /* indexed by 2-zlib_error */
/* (size given to avoid silly warnings with Visual C++) */
#define ERR_MSG(err) z_errmsg[Z_NEED_DICT-(err)]
#define ERR_RETURN(strm,err) \
return (strm->msg = ERR_MSG(err), (err))
/* To be used only when the state is known to be valid */
/* common constants */
#ifndef DEF_WBITS
# define DEF_WBITS MAX_WBITS
#endif
/* default windowBits for decompression. MAX_WBITS is for compression only */
#if MAX_MEM_LEVEL >= 8
# define DEF_MEM_LEVEL 8
#else
# define DEF_MEM_LEVEL MAX_MEM_LEVEL
#endif
/* default memLevel */
#define STORED_BLOCK 0
#define STATIC_TREES 1
#define DYN_TREES 2
/* The three kinds of block type */
#define MIN_MATCH 3
#define MAX_MATCH 258
/* The minimum and maximum match lengths */
#define PRESET_DICT 0x20 /* preset dictionary flag in zlib header */
/* target dependencies */
#if defined(MSDOS) || (defined(WINDOWS) && !defined(WIN32))
# define OS_CODE 0x00
# ifndef Z_SOLO
# if defined(__TURBOC__) || defined(__BORLANDC__)
# if (__STDC__ == 1) && (defined(__LARGE__) || defined(__COMPACT__))
/* Allow compilation with ANSI keywords only enabled */
void _Cdecl farfree( void *block );
void *_Cdecl farmalloc( unsigned long nbytes );
# else
# include <alloc.h>
# endif
# else /* MSC or DJGPP */
# include <malloc.h>
# endif
# endif
#endif
#ifdef AMIGA
# define OS_CODE 0x01
#endif
#if defined(VAXC) || defined(VMS)
# define OS_CODE 0x02
# define F_OPEN(name, mode) \
fopen((name), (mode), "mbc=60", "ctx=stm", "rfm=fix", "mrs=512")
#endif
#if defined(ATARI) || defined(atarist)
# define OS_CODE 0x05
#endif
#ifdef OS2
# define OS_CODE 0x06
# if defined(M_I86) && !defined(Z_SOLO)
# include <malloc.h>
# endif
#endif
#if defined(MACOS) || defined(TARGET_OS_MAC)
# define OS_CODE 0x07
# ifndef Z_SOLO
# if defined(__MWERKS__) && __dest_os != __be_os && __dest_os != __win32_os
# include <unix.h> /* for fdopen */
# else
# ifndef fdopen
# define fdopen(fd,mode) NULL /* No fdopen() */
# endif
# endif
# endif
#endif
#ifdef TOPS20
# define OS_CODE 0x0a
#endif
#ifdef WIN32
# ifndef __CYGWIN__ /* Cygwin is Unix, not Win32 */
# define OS_CODE 0x0b
# endif
#endif
#ifdef __50SERIES /* Prime/PRIMOS */
# define OS_CODE 0x0f
#endif
#if defined(_BEOS_) || defined(RISCOS)
# define fdopen(fd,mode) NULL /* No fdopen() */
#endif
#if (defined(_MSC_VER) && (_MSC_VER > 600)) && !defined __INTERIX
# if defined(_WIN32_WCE)
# define fdopen(fd,mode) NULL /* No fdopen() */
# ifndef _PTRDIFF_T_DEFINED
typedef int ptrdiff_t;
# define _PTRDIFF_T_DEFINED
# endif
# else
# define fdopen(fd,type) _fdopen(fd,type)
# endif
#endif
#if defined(__BORLANDC__) && !defined(MSDOS)
#pragma warn -8004
#pragma warn -8008
#pragma warn -8066
#endif
/* provide prototypes for these when building zlib without LFS */
#if !defined(_WIN32) && \
(!defined(_LARGEFILE64_SOURCE) || _LFS64_LARGEFILE-0 == 0)
ZEXTERN uLong ZEXPORT adler32_combine64 OF((uLong, uLong, z_off_t));
ZEXTERN uLong ZEXPORT crc32_combine64 OF((uLong, uLong, z_off_t));
#endif
/* common defaults */
#ifndef OS_CODE
# define OS_CODE 0x03 /* assume Unix */
#endif
#ifndef F_OPEN
# define F_OPEN(name, mode) fopen((name), (mode))
#endif
/* functions */
#if defined(pyr) || defined(Z_SOLO)
# define NO_MEMCPY
#endif
#if defined(SMALL_MEDIUM) && !defined(_MSC_VER) && !defined(__SC__)
/* Use our own functions for small and medium model with MSC <= 5.0.
* You may have to use the same strategy for Borland C (untested).
* The __SC__ check is for Symantec.
*/
# define NO_MEMCPY
#endif
#if defined(STDC) && !defined(HAVE_MEMCPY) && !defined(NO_MEMCPY)
# define HAVE_MEMCPY
#endif
#ifdef HAVE_MEMCPY
# ifdef SMALL_MEDIUM /* MSDOS small or medium model */
# define zmemcpy _fmemcpy
# define zmemcmp _fmemcmp
# define zmemzero(dest, len) _fmemset(dest, 0, len)
# else
# define zmemcpy memcpy
# define zmemcmp memcmp
# define zmemzero(dest, len) memset(dest, 0, len)
# endif
#else
void ZLIB_INTERNAL zmemcpy OF((Bytef* dest, const Bytef* source, uInt len));
int ZLIB_INTERNAL zmemcmp OF((const Bytef* s1, const Bytef* s2, uInt len));
void ZLIB_INTERNAL zmemzero OF((Bytef* dest, uInt len));
#endif
/* Diagnostic functions */
#ifdef DEBUG
# include <stdio.h>
extern int ZLIB_INTERNAL z_verbose;
extern void ZLIB_INTERNAL z_error OF((char *m));
# define Assert(cond,msg) {if(!(cond)) z_error(msg);}
# define Trace(x) {if (z_verbose>=0) fprintf x ;}
# define Tracev(x) {if (z_verbose>0) fprintf x ;}
# define Tracevv(x) {if (z_verbose>1) fprintf x ;}
# define Tracec(c,x) {if (z_verbose>0 && (c)) fprintf x ;}
# define Tracecv(c,x) {if (z_verbose>1 && (c)) fprintf x ;}
#else
# define Assert(cond,msg)
# define Trace(x)
# define Tracev(x)
# define Tracevv(x)
# define Tracec(c,x)
# define Tracecv(c,x)
#endif
#ifndef Z_SOLO
voidpf ZLIB_INTERNAL zcalloc OF((voidpf opaque, unsigned items,
unsigned size));
void ZLIB_INTERNAL zcfree OF((voidpf opaque, voidpf ptr));
#endif
#define ZALLOC(strm, items, size) \
(*((strm)->zalloc))((strm)->opaque, (items), (size))
#define ZFREE(strm, addr) (*((strm)->zfree))((strm)->opaque, (voidpf)(addr))
#define TRY_FREE(s, p) {if (p) ZFREE(s, p);}
/* Reverse the bytes in a 32-bit value */
#define ZSWAP32(q) ((((q) >> 24) & 0xff) + (((q) >> 8) & 0xff00) + \
(((q) & 0xff00) << 8) + (((q) & 0xff) << 24))
#endif /* ZUTIL_H */

View File

@@ -1,56 +0,0 @@
See also http://pvp.haskell.org/faq
0.6.2.1 Herbert Valerio Riedel <hvr@gnu.org> August 2019
* Add support for GHC 8.8 / base-4.13
0.6.2 Herbert Valerio Riedel <hvr@gnu.org> March 2018
* New cabal flag 'pkg-config' for discovering 'zlib` via pkg-config(1) (#16)
* Use CApiFFI where available for cross-compile friendliness (#14)
* Change the window bits range from 8..15 to 9..15 (#11)
0.6.1.2 Herbert Valerio Riedel <hvr@gnu.org> October 2016
* Fix a segfault when reading the stream multithreaded, #7
* New experimental cabal flag 'non-blocking-ffi' for 'safe' ffi calls
0.6.1.1 Duncan Coutts <duncan@community.haskell.org> April 2015
* Fixed building with GHC 7.0 and 7.2
0.6.0.2 Duncan Coutts <duncan@community.haskell.org> April 2015
* Fixed building with GHC 7.0 and 7.2
0.6.1.0 Duncan Coutts <duncan@community.haskell.org> April 2015
* Support for concatenated gzip files (multiple back-to-back streams)
0.6.0.1 Duncan Coutts <duncan@community.haskell.org> April 2015
* Fixed building with older GHC
* Fixed warnings with new GHC
* Fixed building on Windows
* Fixed testsuite
0.6.0.0 Duncan Coutts <duncan@community.haskell.org> April 2015
* New incremental interface for compression and decompression
* Provide access to unconsumed trailing data
* Simplified structured error type, and instance of Exception
* Updated bundled zlib C code to 1.2.8 (used on Windows)
* Fixed memory leak of zlib z_stream C structure
* More derived instances (Eq, Show, Typeable, Generic)
0.5.4.2 Duncan Coutts <duncan@community.haskell.org> November 2014
* Builds with GHC 7.10
0.5.4.1 Duncan Coutts <duncan@community.haskell.org> February 2013
* Force tail of input when finished decompressing, to help lazy IO
0.5.4.0 Duncan Coutts <duncan@community.haskell.org> September 2012
* New support for zlib custom dictionaries

View File

@@ -1,6 +0,0 @@
module Main where
import qualified Data.ByteString.Lazy as B
import qualified Codec.Compression.GZip as GZip
main = B.interact GZip.decompress

View File

@@ -1,8 +0,0 @@
module Main where
import qualified Data.ByteString.Lazy as B
import qualified Codec.Compression.GZip as GZip
main = B.interact $ GZip.compressWith GZip.defaultCompressParams {
GZip.compressLevel = GZip.BestCompression
}

View File

@@ -1,13 +0,0 @@
module Main where
import System.Environment
import qualified Data.ByteString.Lazy as B
import qualified Codec.Compression.Zlib as Zlib
main = do args <- getArgs
case args of
[] -> B.interact Zlib.compress
["-d"] -> B.interact Zlib.decompress
_ -> do name <- getProgName
error $ "usage: " ++ name ++ " [-d] < source > dest"

View File

@@ -1,45 +0,0 @@
HSC=ghc
CC=gcc # can also use $(HSC)
UNCOMPRESSED=`find .. -type f`
GZIPPED=`find .. -type f -name \*.gz`
HSC_OPTS=-hide-package zlib -i
HSBINS=hszpipe hsgzip hsgunzip test-hs
BINS=$(HSBINS) zpipe
ALL:
clean:
rm -f $(BINS) *.o *.hi
bins: hszpipe zpipe hsgzip hsgunzip
%.hs : %.hsc
hsc2hs --cc=$(HSC) --ld=$(HSC) --lflag=-lz -o $@ $<
hs%: ../examples/%.hs
$(HSC) $(HSC_OPTS) -lz -i.. -o $@ --make $<
$(HSBINS) : ../Codec/Compression/Zlib/Stream.hs
test-hs: Test.hs
$(HSC) $(HSC_OPTS) -Wall -lz -i. -i.. -o $@ --make $<
zpipe: zpipe.c
$(CC) -lz -o zpipe zpipe.c
test: run-test-hs run-test-sh-zpipe run-test-sh-gzip run-test-sh-gunzip
run-test-hs: test-hs
./test-hs
run-test-sh-zpipe: zpipe hszpipe
./zpipe-compress.sh $(UNCOMPRESSED)
run-test-sh-gzip: hsgzip hsgunzip
./gzip-compress.sh $(UNCOMPRESSED)
run-test-sh-gunzip: hsgunzip
./gzip-uncompress.sh $(GZIPPED)

View File

@@ -1,338 +0,0 @@
{-# LANGUAGE CPP #-}
{-# LANGUAGE RankNTypes #-}
module Main where
import Codec.Compression.Zlib.Internal
import qualified Codec.Compression.Zlib as Zlib
import qualified Codec.Compression.GZip as GZip
import qualified Codec.Compression.Zlib.Raw as Raw
import Test.Codec.Compression.Zlib.Internal ()
import Test.Codec.Compression.Zlib.Stream ()
import Test.QuickCheck
import Test.Tasty
import Test.Tasty.QuickCheck
import Test.Tasty.HUnit
import Utils ()
import Control.Monad
import Control.Monad.ST.Lazy (ST)
import Control.Exception
import qualified Data.ByteString.Char8 as BS.Char8
import qualified Data.ByteString.Lazy as BL
import qualified Data.ByteString as BS
import System.IO
#if !(MIN_VERSION_base(4,6,0))
import Prelude hiding (catch)
#endif
main :: IO ()
main = defaultMain $
testGroup "zlib tests" [
testGroup "property tests" [
testProperty "decompress . compress = id (standard)" prop_decompress_after_compress,
testProperty "decompress . compress = id (Zlib -> GZipOrZLib)" prop_gziporzlib1,
testProperty "decompress . compress = id (GZip -> GZipOrZlib)" prop_gziporzlib2,
testProperty "concatenated gzip members" prop_gzip_concat,
testProperty "multiple gzip members, boundaries (all 2-chunks)" prop_multiple_members_boundary2,
testProperty "multiple gzip members, boundaries (all 3-chunks)" prop_multiple_members_boundary3,
testProperty "prefixes of valid stream detected as truncated" prop_truncated
],
testGroup "unit tests" [
testCase "simple gzip case" test_simple_gzip,
testCase "detect bad crc" test_bad_crc,
testCase "detect non-gzip" test_non_gzip,
testCase "detect custom dictionary" test_custom_dict,
testCase "dectect inflate with wrong dict" test_wrong_dictionary,
testCase "dectect inflate with right dict" test_right_dictionary,
testCase "handle trailing data" test_trailing_data,
testCase "multiple gzip members" test_multiple_members,
testCase "check small input chunks" test_small_chunks,
testCase "check empty input" test_empty,
testCase "check exception raised" test_exception
]
]
prop_decompress_after_compress :: Format
-> CompressParams
-> DecompressParams
-> Property
prop_decompress_after_compress w cp dp =
(w /= zlibFormat || decompressWindowBits dp >= compressWindowBits cp) &&
(decompressWindowBits dp > compressWindowBits cp) &&
decompressBufferSize dp > 0 && compressBufferSize cp > 0 ==>
liftM2 (==) (decompress w dp . compress w cp) id
prop_gziporzlib1 :: CompressParams
-> DecompressParams
-> Property
prop_gziporzlib1 cp dp =
decompressWindowBits dp > compressWindowBits cp &&
decompressBufferSize dp > 0 && compressBufferSize cp > 0 ==>
liftM2 (==) (decompress gzipOrZlibFormat dp . compress zlibFormat cp) id
prop_gziporzlib2 :: CompressParams
-> DecompressParams
-> Property
prop_gziporzlib2 cp dp =
decompressWindowBits dp >= compressWindowBits cp &&
decompressBufferSize dp > 0 && compressBufferSize cp > 0 ==>
liftM2 (==) (decompress gzipOrZlibFormat dp . compress gzipFormat cp) id
prop_gzip_concat :: CompressParams
-> DecompressParams
-> BL.ByteString
-> Property
prop_gzip_concat cp dp input =
decompressWindowBits dp >= compressWindowBits cp &&
decompressBufferSize dp > 0 && compressBufferSize cp > 0 ==>
let catComp = BL.concat (replicate 5 (compress gzipFormat cp input))
compCat = compress gzipFormat cp (BL.concat (replicate 5 input))
in decompress gzipFormat dp { decompressAllMembers = True } catComp
== decompress gzipFormat dp { decompressAllMembers = True } compCat
prop_multiple_members_boundary2 :: Property
prop_multiple_members_boundary2 =
forAll shortStrings $ \bs ->
all (\c -> decomp c == BL.append bs bs)
(twoChunkSplits (comp bs `BL.append` comp bs))
where
comp = compress gzipFormat defaultCompressParams
decomp = decompress gzipFormat defaultDecompressParams
shortStrings = fmap BL.pack $ listOf arbitrary
prop_multiple_members_boundary3 :: Property
prop_multiple_members_boundary3 =
forAll shortStrings $ \bs ->
all (\c -> decomp c == BL.append bs bs)
(threeChunkSplits (comp bs `BL.append` comp bs))
where
comp = compress gzipFormat defaultCompressParams
decomp = decompress gzipFormat defaultDecompressParams
shortStrings = sized $ \sz -> resize (sz `div` 10) $
fmap BL.pack $ listOf arbitrary
prop_truncated :: Format -> Property
prop_truncated format =
forAll shortStrings $ \bs ->
all (truncated decomp)
(init (BL.inits (comp bs)))
-- All the initial prefixes of a valid compressed stream should be detected
-- as truncated.
where
comp = compress format defaultCompressParams
decomp = decompressST format defaultDecompressParams
truncated :: (forall s. DecompressStream (ST s)) -> BL.ByteString -> Bool
truncated = foldDecompressStreamWithInput (\_ r -> r) (\_ -> False)
(\err -> case err of TruncatedInput -> True; _ -> False)
shortStrings = sized $ \sz -> resize (sz `div` 6) arbitrary
test_simple_gzip :: Assertion
test_simple_gzip =
withSampleData "hello.gz" $ \hnd ->
let decomp = decompressIO gzipFormat defaultDecompressParams
in assertDecompressOk hnd decomp
test_bad_crc :: Assertion
test_bad_crc =
withSampleData "bad-crc.gz" $ \hnd -> do
let decomp = decompressIO gzipFormat defaultDecompressParams
err <- assertDecompressError hnd decomp
msg <- assertDataFormatError err
msg @?= "incorrect data check"
test_non_gzip :: Assertion
test_non_gzip = do
withSampleData "not-gzip" $ \hnd -> do
let decomp = decompressIO gzipFormat defaultDecompressParams
err <- assertDecompressError hnd decomp
msg <- assertDataFormatError err
msg @?= "incorrect header check"
withSampleData "not-gzip" $ \hnd -> do
let decomp = decompressIO zlibFormat defaultDecompressParams
err <- assertDecompressError hnd decomp
msg <- assertDataFormatError err
msg @?= "incorrect header check"
withSampleData "not-gzip" $ \hnd -> do
let decomp = decompressIO rawFormat defaultDecompressParams
err <- assertDecompressError hnd decomp
msg <- assertDataFormatError err
msg @?= "invalid code lengths set"
withSampleData "not-gzip" $ \hnd -> do
let decomp = decompressIO gzipOrZlibFormat defaultDecompressParams
err <- assertDecompressError hnd decomp
msg <- assertDataFormatError err
msg @?= "incorrect header check"
test_custom_dict :: Assertion
test_custom_dict =
withSampleData "custom-dict.zlib" $ \hnd -> do
let decomp = decompressIO zlibFormat defaultDecompressParams
err <- assertDecompressError hnd decomp
err @?= DictionaryRequired
test_wrong_dictionary :: Assertion
test_wrong_dictionary = do
withSampleData "custom-dict.zlib" $ \hnd -> do
let decomp = decompressIO zlibFormat defaultDecompressParams {
decompressDictionary = -- wrong dict!
Just (BS.pack [65,66,67])
}
err <- assertDecompressError hnd decomp
err @?= DictionaryMismatch
test_right_dictionary :: Assertion
test_right_dictionary = do
withSampleData "custom-dict.zlib" $ \hnd -> do
dict <- readSampleData "custom-dict.zlib-dict"
let decomp = decompressIO zlibFormat defaultDecompressParams {
decompressDictionary =
Just (toStrict dict)
}
assertDecompressOk hnd decomp
test_trailing_data :: Assertion
test_trailing_data =
withSampleData "two-files.gz" $ \hnd -> do
let decomp = decompressIO gzipFormat defaultDecompressParams {
decompressAllMembers = False
}
chunks <- assertDecompressOkChunks hnd decomp
case chunks of
[chunk] -> chunk @?= BS.Char8.pack "Test 1"
_ -> assertFailure "expected single chunk"
test_multiple_members :: Assertion
test_multiple_members =
withSampleData "two-files.gz" $ \hnd -> do
let decomp = decompressIO gzipFormat defaultDecompressParams {
decompressAllMembers = True
}
chunks <- assertDecompressOkChunks hnd decomp
case chunks of
[chunk1,
chunk2] -> do chunk1 @?= BS.Char8.pack "Test 1"
chunk2 @?= BS.Char8.pack "Test 2"
_ -> assertFailure "expected two chunks"
test_small_chunks :: Assertion
test_small_chunks = do
uncompressedFile <- readSampleData "not-gzip"
GZip.compress (smallChunks uncompressedFile) @?= GZip.compress uncompressedFile
Zlib.compress (smallChunks uncompressedFile) @?= Zlib.compress uncompressedFile
Raw.compress (smallChunks uncompressedFile) @?= Raw.compress uncompressedFile
GZip.decompress (smallChunks (GZip.compress uncompressedFile)) @?= uncompressedFile
Zlib.decompress (smallChunks (Zlib.compress uncompressedFile)) @?= uncompressedFile
Raw.decompress (smallChunks (Raw.compress uncompressedFile)) @?= uncompressedFile
compressedFile <- readSampleData "hello.gz"
(GZip.decompress . smallChunks) compressedFile @?= GZip.decompress compressedFile
test_empty :: Assertion
test_empty = do
-- Regression test to make sure we only ask for input once in the case of
-- initially empty input. We previously asked for input twice before
-- returning the error.
let decomp = decompressIO zlibFormat defaultDecompressParams
case decomp of
DecompressInputRequired next -> do
decomp' <- next BS.empty
case decomp' of
DecompressStreamError TruncatedInput -> return ()
_ -> assertFailure "expected truncated error"
_ -> assertFailure "expected input"
test_exception :: Assertion
test_exception =
(do
compressedFile <- readSampleData "bad-crc.gz"
_ <- evaluate (BL.length (GZip.decompress compressedFile))
assertFailure "expected exception")
`catch` \err -> do
msg <- assertDataFormatError err
msg @?= "incorrect data check"
toStrict :: BL.ByteString -> BS.ByteString
#if MIN_VERSION_bytestring(0,10,0)
toStrict = BL.toStrict
#else
toStrict = BS.concat . BL.toChunks
#endif
-----------------------
-- Chunk boundary utils
smallChunks :: BL.ByteString -> BL.ByteString
smallChunks = BL.fromChunks . map (\c -> BS.pack [c]) . BL.unpack
twoChunkSplits :: BL.ByteString -> [BL.ByteString]
twoChunkSplits bs = zipWith (\a b -> BL.fromChunks [a,b]) (BS.inits sbs) (BS.tails sbs)
where
sbs = toStrict bs
threeChunkSplits :: BL.ByteString -> [BL.ByteString]
threeChunkSplits bs =
[ BL.fromChunks [a,b,c]
| (a,x) <- zip (BS.inits sbs) (BS.tails sbs)
, (b,c) <- zip (BS.inits x) (BS.tails x) ]
where
sbs = toStrict bs
--------------
-- HUnit Utils
readSampleData :: FilePath -> IO BL.ByteString
readSampleData file = BL.readFile ("test/data/" ++ file)
withSampleData :: FilePath -> (Handle -> IO a) -> IO a
withSampleData file = withFile ("test/data/" ++ file) ReadMode
expected :: String -> String -> IO a
expected e g = assertFailure ("expected: " ++ e ++ "\nbut got: " ++ g)
>> fail ""
assertDecompressOk :: Handle -> DecompressStream IO -> Assertion
assertDecompressOk hnd =
foldDecompressStream
(BS.hGet hnd 4000 >>=)
(\_ r -> r)
(\_ -> return ())
(\err -> expected "decompress ok" (show err))
assertDecompressOkChunks :: Handle -> DecompressStream IO -> IO [BS.ByteString]
assertDecompressOkChunks hnd =
foldDecompressStream
(BS.hGet hnd 4000 >>=)
(\chunk -> liftM (chunk:))
(\_ -> return [])
(\err -> expected "decompress ok" (show err))
assertDecompressError :: Handle -> DecompressStream IO -> IO DecompressError
assertDecompressError hnd =
foldDecompressStream
(BS.hGet hnd 4000 >>=)
(\_ r -> r)
(\_ -> expected "StreamError" "StreamEnd")
return
assertDataFormatError :: DecompressError -> IO String
assertDataFormatError (DataFormatError detail) = return detail
assertDataFormatError _ = assertFailure "expected DataError"
>> return ""

View File

@@ -1,32 +0,0 @@
{-# OPTIONS_GHC -fno-warn-orphans #-}
-- | Test code and properties for "Codec.Compression.Zlib.Internal"
--
module Test.Codec.Compression.Zlib.Internal where
import Codec.Compression.Zlib.Internal
import Test.Codec.Compression.Zlib.Stream ()
import Test.QuickCheck
import Control.Monad (ap)
instance Arbitrary CompressParams where
arbitrary = return CompressParams `ap` arbitrary `ap` arbitrary
`ap` arbitrary `ap` arbitrary
`ap` arbitrary `ap` arbitraryBufferSize
`ap` return Nothing
arbitraryBufferSize :: Gen Int
arbitraryBufferSize = frequency $ [(10, return n) | n <- [1..1024]] ++
[(20, return n) | n <- [1025..8192]] ++
[(40, return n) | n <- [8193..131072]] ++
[(1, return n) | n <- [131072..1048576]]
instance Arbitrary DecompressParams where
arbitrary = return DecompressParams `ap` arbitrary
`ap` arbitraryBufferSize
`ap` return Nothing
`ap` arbitrary

View File

@@ -1,40 +0,0 @@
{-# OPTIONS_GHC -fno-warn-orphans #-}
-- | Test code and properties for "Codec.Compression.Zlib.Stream"
--
module Test.Codec.Compression.Zlib.Stream where
import Codec.Compression.Zlib.Internal
import Test.QuickCheck
instance Arbitrary Format where
-- GZipOrZlib omitted since it's not symmetric
arbitrary = elements [gzipFormat, zlibFormat, rawFormat]
instance Arbitrary Method where
arbitrary = return deflateMethod
instance Arbitrary CompressionLevel where
arbitrary = elements $ [defaultCompression, noCompression,
bestCompression, bestSpeed]
++ map compressionLevel [1..9]
instance Arbitrary WindowBits where
arbitrary = elements $ defaultWindowBits:map windowBits [9..15]
instance Arbitrary MemoryLevel where
arbitrary = elements $ [defaultMemoryLevel, minMemoryLevel, maxMemoryLevel]
++ [memoryLevel n | n <- [1..9]]
instance Arbitrary CompressionStrategy where
arbitrary = elements $ [defaultStrategy, filteredStrategy, huffmanOnlyStrategy]
-- These are disabled by default in the package
-- as they are only available with zlib >=1.2
-- ++ [RLE, Fixed]

View File

@@ -1,28 +0,0 @@
{-# OPTIONS_GHC -fno-warn-orphans #-}
module Utils where
import qualified Data.ByteString.Lazy as BL
import qualified Data.ByteString as BS
import Test.QuickCheck
-------------------
-- QuickCheck Utils
maxStrSize :: Double
maxStrSize = 500
-- convert a QC size parameter into one for generating long lists,
-- growing inverse exponentially up to maxStrSize
strSize :: Int -> Int
strSize n = floor (maxStrSize * (1 - 2 ** (-fromIntegral n/100)))
instance Arbitrary BL.ByteString where
arbitrary = sized $ \sz -> fmap BL.fromChunks $ listOf $ resize (sz `div` 2) arbitrary
shrink = map BL.pack . shrink . BL.unpack
instance Arbitrary BS.ByteString where
arbitrary = sized $ \sz -> resize (strSize sz) $ fmap BS.pack $ listOf $ arbitrary
shrink = map BS.pack . shrink . BS.unpack
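The `strSize` helper above approaches `maxStrSize` asymptotically as QuickCheck's size parameter grows; a few sample points, computed from the formula with the `maxStrSize = 500` defined above:

```haskell
-- strSize 0   == 0
-- strSize 100 == 250   -- half of maxStrSize after 100 size steps
-- strSize 200 == 375
-- strSize 400 == 468   -- approaching, but never reaching, maxStrSize
```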

Binary file not shown.

Binary file not shown.

View File

@@ -1 +0,0 @@
Haskell

Binary file not shown.

View File

@@ -1 +0,0 @@
This is not a gzip file!

Binary file not shown.

View File

@@ -1,14 +0,0 @@
#!/usr/bin/env bash
set -e
for d in $*; do
# they don't actually produce the same output
# cmp <(./hsgzip < $d) <(gzip -c < $d)
cmp $d <(gzip -c < $d | gunzip -c) # just in case..
cmp $d <(gzip -c < $d | ./hsgunzip )
cmp $d <(./hsgzip < $d | gunzip -c)
cmp $d <(./hsgzip < $d | ./hsgunzip)
echo -n '.'
done
echo

View File

@@ -1,16 +0,0 @@
#!/usr/bin/env bash
for d in $*; do
if gunzip -c < $d &> /dev/null; then
if cmp <(./hsgunzip < $d) <(gunzip -c < $d) > /dev/null; then
echo -n '.'
else
echo '#'
echo "FAILED: $d"
exit 1
fi
else
echo -n '_'
fi
done
echo

View File

@@ -1,11 +0,0 @@
#!/usr/bin/env bash
set -e
for d in $*; do
cmp <(./hszpipe < $d) <(./zpipe < $d)
cmp $d <(./zpipe < $d | ./zpipe -d) # just in case..
cmp $d <(./zpipe < $d | ./hszpipe -d)
echo -n .
done
echo

View File

@@ -1,205 +0,0 @@
/* zpipe.c: example of proper use of zlib's inflate() and deflate()
Not copyrighted -- provided to the public domain
Version 1.4 11 December 2005 Mark Adler */
/* Version history:
1.0 30 Oct 2004 First version
1.1 8 Nov 2004 Add void casting for unused return values
Use switch statement for inflate() return values
1.2 9 Nov 2004 Add assertions to document zlib guarantees
1.3 6 Apr 2005 Remove incorrect assertion in inf()
1.4 11 Dec 2005 Add hack to avoid MSDOS end-of-line conversions
Avoid some compiler warnings for input and output buffers
*/
#include <stdio.h>
#include <string.h>
#include <assert.h>
#include "zlib.h"
#if defined(MSDOS) || defined(OS2) || defined(WIN32) || defined(__CYGWIN__)
# include <fcntl.h>
# include <io.h>
# define SET_BINARY_MODE(file) setmode(fileno(file), O_BINARY)
#else
# define SET_BINARY_MODE(file)
#endif
#define CHUNK 16384
/* Compress from file source to file dest until EOF on source.
def() returns Z_OK on success, Z_MEM_ERROR if memory could not be
allocated for processing, Z_STREAM_ERROR if an invalid compression
level is supplied, Z_VERSION_ERROR if the version of zlib.h and the
version of the library linked do not match, or Z_ERRNO if there is
an error reading or writing the files. */
int def(FILE *source, FILE *dest, int level)
{
int ret, flush;
unsigned have;
z_stream strm;
unsigned char in[CHUNK];
unsigned char out[CHUNK];
/* allocate deflate state */
strm.zalloc = Z_NULL;
strm.zfree = Z_NULL;
strm.opaque = Z_NULL;
ret = deflateInit(&strm, level);
if (ret != Z_OK)
return ret;
/* compress until end of file */
do {
strm.avail_in = fread(in, 1, CHUNK, source);
if (ferror(source)) {
(void)deflateEnd(&strm);
return Z_ERRNO;
}
flush = feof(source) ? Z_FINISH : Z_NO_FLUSH;
strm.next_in = in;
/* run deflate() on input until output buffer not full, finish
compression if all of source has been read in */
do {
strm.avail_out = CHUNK;
strm.next_out = out;
ret = deflate(&strm, flush); /* no bad return value */
assert(ret != Z_STREAM_ERROR); /* state not clobbered */
have = CHUNK - strm.avail_out;
if (fwrite(out, 1, have, dest) != have || ferror(dest)) {
(void)deflateEnd(&strm);
return Z_ERRNO;
}
} while (strm.avail_out == 0);
assert(strm.avail_in == 0); /* all input will be used */
/* done when last data in file processed */
} while (flush != Z_FINISH);
assert(ret == Z_STREAM_END); /* stream will be complete */
/* clean up and return */
(void)deflateEnd(&strm);
return Z_OK;
}
/* Decompress from file source to file dest until stream ends or EOF.
inf() returns Z_OK on success, Z_MEM_ERROR if memory could not be
allocated for processing, Z_DATA_ERROR if the deflate data is
invalid or incomplete, Z_VERSION_ERROR if the version of zlib.h and
the version of the library linked do not match, or Z_ERRNO if there
is an error reading or writing the files. */
int inf(FILE *source, FILE *dest)
{
int ret;
unsigned have;
z_stream strm;
unsigned char in[CHUNK];
unsigned char out[CHUNK];
/* allocate inflate state */
strm.zalloc = Z_NULL;
strm.zfree = Z_NULL;
strm.opaque = Z_NULL;
strm.avail_in = 0;
strm.next_in = Z_NULL;
ret = inflateInit(&strm);
if (ret != Z_OK)
return ret;
/* decompress until deflate stream ends or end of file */
do {
strm.avail_in = fread(in, 1, CHUNK, source);
if (ferror(source)) {
(void)inflateEnd(&strm);
return Z_ERRNO;
}
if (strm.avail_in == 0)
break;
strm.next_in = in;
/* run inflate() on input until output buffer not full */
do {
strm.avail_out = CHUNK;
strm.next_out = out;
ret = inflate(&strm, Z_NO_FLUSH);
assert(ret != Z_STREAM_ERROR); /* state not clobbered */
switch (ret) {
case Z_NEED_DICT:
ret = Z_DATA_ERROR; /* and fall through */
case Z_DATA_ERROR:
case Z_MEM_ERROR:
(void)inflateEnd(&strm);
return ret;
}
have = CHUNK - strm.avail_out;
if (fwrite(out, 1, have, dest) != have || ferror(dest)) {
(void)inflateEnd(&strm);
return Z_ERRNO;
}
} while (strm.avail_out == 0);
/* done when inflate() says it's done */
} while (ret != Z_STREAM_END);
/* clean up and return */
(void)inflateEnd(&strm);
return ret == Z_STREAM_END ? Z_OK : Z_DATA_ERROR;
}
/* report a zlib or i/o error */
void zerr(int ret)
{
fputs("zpipe: ", stderr);
switch (ret) {
case Z_ERRNO:
if (ferror(stdin))
fputs("error reading stdin\n", stderr);
if (ferror(stdout))
fputs("error writing stdout\n", stderr);
break;
case Z_STREAM_ERROR:
fputs("invalid compression level\n", stderr);
break;
case Z_DATA_ERROR:
fputs("invalid or incomplete deflate data\n", stderr);
break;
case Z_MEM_ERROR:
fputs("out of memory\n", stderr);
break;
case Z_VERSION_ERROR:
fputs("zlib version mismatch!\n", stderr);
}
}
/* compress or decompress from stdin to stdout */
int main(int argc, char **argv)
{
int ret;
/* avoid end-of-line conversions */
SET_BINARY_MODE(stdin);
SET_BINARY_MODE(stdout);
/* do compression if no arguments */
if (argc == 1) {
ret = def(stdin, stdout, Z_DEFAULT_COMPRESSION);
if (ret != Z_OK)
zerr(ret);
return ret;
}
/* do decompression if -d specified */
else if (argc == 2 && strcmp(argv[1], "-d") == 0) {
ret = inf(stdin, stdout);
if (ret != Z_OK)
zerr(ret);
return ret;
}
/* otherwise, report usage */
else {
fputs("zpipe usage: zpipe [-d] < source > dest\n", stderr);
return 1;
}
}

View File

@@ -1,116 +0,0 @@
cabal-version: >= 1.10
name: zlib
version: 0.6.2.1
copyright: (c) 2006-2016 Duncan Coutts
license: BSD3
license-file: LICENSE
author: Duncan Coutts <duncan@community.haskell.org>
maintainer: Duncan Coutts <duncan@community.haskell.org>
bug-reports: https://github.com/haskell/zlib/issues
category: Codec
synopsis: Compression and decompression in the gzip and zlib formats
description: This package provides a pure interface for compressing and
decompressing streams of data represented as lazy
'ByteString's. It uses the
<https://en.wikipedia.org/wiki/Zlib zlib C library>
so it has high performance. It supports the \"zlib\",
\"gzip\" and \"raw\" compression formats.
.
It provides a convenient high level API suitable for most
tasks and for the few cases where more control is needed it
provides access to the full zlib feature set.
build-type: Simple
tested-with: GHC ==7.0.4, GHC ==7.2.2, GHC ==7.4.2, GHC ==7.6.3, GHC ==7.8.4, GHC ==7.10.3, GHC ==8.0.1, GHC==8.0.2, GHC ==8.2.2, GHC ==8.4.4, GHC==8.6.5, GHC==8.8.3, GHC==8.10.1
extra-source-files: changelog
-- zlib C sources (for Windows)
cbits/crc32.h cbits/inffast.h cbits/inflate.h
cbits/trees.h cbits/deflate.h cbits/inffixed.h
cbits/inftrees.h cbits/zutil.h cbits/gzguts.h
-- test data files
test/data/bad-crc.gz test/data/custom-dict.zlib
test/data/custom-dict.zlib-dict test/data/hello.gz
test/data/not-gzip test/data/two-files.gz
-- demo programs:
examples/gzip.hs examples/gunzip.hs
source-repository head
type: git
location: https://github.com/haskell/zlib.git
flag non-blocking-ffi
default: False
manual: True
description: The (de)compression calls can sometimes take a long time, which
prevents other Haskell threads running. Enabling this flag
avoids this unfairness, but with greater overall cost.
flag pkg-config
default: False
manual: True
description: Use @pkg-config(1)@ to locate foreign @zlib@ library.
flag static
default: False
manual: True
description: Use the bundled sources. Requires pkg-config to be False. For windows, this is the default.
library
exposed-modules: Codec.Compression.GZip,
Codec.Compression.Zlib,
Codec.Compression.Zlib.Raw,
Codec.Compression.Zlib.Internal
other-modules: Codec.Compression.Zlib.Stream
if impl(ghc < 7)
default-language: Haskell98
default-extensions: PatternGuards
else
default-language: Haskell2010
other-extensions: CPP, ForeignFunctionInterface, RankNTypes, BangPatterns,
DeriveDataTypeable
if impl(ghc >= 7.2)
other-extensions: DeriveGeneric
if impl(ghc >= 7.6)
other-extensions: CApiFFI
build-depends: base >= 4 && < 4.15,
bytestring >= 0.9 && < 0.11
if impl(ghc >= 7.2 && < 7.6)
build-depends: ghc-prim
includes: zlib.h
ghc-options: -Wall -fwarn-tabs
if flag(non-blocking-ffi)
cpp-options: -DNON_BLOCKING_FFI
if flag(pkg-config)
-- NB: pkg-config is available on windows as well when using msys2
pkgconfig-depends: zlib
else
-- don't use pkg-config
if !os(windows) && !flag(static)
-- Normally we use the standard system zlib.
extra-libraries: z
else
-- However for the benefit of users of Windows (which does not have zlib
-- by default) we bundle a complete copy of the C sources of zlib-1.2.8
c-sources: cbits/adler32.c cbits/compress.c cbits/crc32.c
cbits/deflate.c cbits/infback.c
cbits/inffast.c cbits/inflate.c cbits/inftrees.c
cbits/trees.c cbits/uncompr.c cbits/zutil.c
include-dirs: cbits
install-includes: zlib.h zconf.h
test-suite tests
type: exitcode-stdio-1.0
main-is: Test.hs
other-modules: Utils,
Test.Codec.Compression.Zlib.Internal,
Test.Codec.Compression.Zlib.Stream
hs-source-dirs: test
default-language: Haskell2010
build-depends: base, bytestring, zlib,
QuickCheck == 2.*,
tasty >= 0.8 && < 1.4,
tasty-quickcheck >= 0.8 && < 0.11,
tasty-hunit >= 0.8 && < 0.11
ghc-options: -Wall
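The three flags declared above are ordinary Cabal flags, so a downstream project can toggle them without editing this file. A minimal sketch using a `package` stanza in `cabal.project` (assumes a cabal-install new enough to support per-package flag settings):

```
package zlib
  flags: +pkg-config
```

As the flag descriptions note, `static` only applies when `pkg-config` is off.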

View File

@@ -1,5 +1,64 @@
# Revision history for ghcup
## 0.1.14 -- 2021-03-07
* Major bugfix: fix handling of stray versions wrt [#116](https://gitlab.haskell.org/haskell/ghcup-hs/-/issues/116)
* Fix error messages and overhaul pretty printing wrt [#115](https://gitlab.haskell.org/haskell/ghcup-hs/-/issues/115)
## 0.1.13 -- 2021-02-26
* Support ARMv7/AARCH64
* Add command line completions for installed and available versions wrt [MR #70](https://gitlab.haskell.org/haskell/ghcup-hs/-/merge_requests/70)
* Allow cycling through set tools wrt [#114](https://gitlab.haskell.org/haskell/ghcup-hs/-/issues/114)
* Fix item selection with unavailable versions wrt [#107](https://gitlab.haskell.org/haskell/ghcup-hs/-/issues/107)
* Allow for dynamic post-install, post-remove and pre-compile msgs wrt [MR #68](https://gitlab.haskell.org/haskell/ghcup-hs/-/merge_requests/68)
* Alert user if upgraded ghcup is shadowed by old ghcup wrt [#111](https://gitlab.haskell.org/haskell/ghcup-hs/-/issues/111)
* Fix to `ghcup` directory creation and placement for the XDG install mode ([MR #49](https://gitlab.haskell.org/haskell/ghcup-hs/-/merge_requests/49))
* Set 755 permissions on executables, wrt #97
* Add [NO_COLOR](https://no-color.org/) support wrt [MR #47](https://gitlab.haskell.org/haskell/ghcup-hs/-/merge_requests/47)
## 0.1.12 -- 2020-11-21
* Fix disappearing HLS symlinks wrt #91
* improve TUI:
- separators between tools sections
- reverse list order so latest is on top
- expand the blue selected bar
- show new latest versions in bright white
* allow a configuration file and setting TUI hotkeys wrt #41
- see https://gitlab.haskell.org/haskell/ghcup-hs#configuration for a more in-depth explanation
* add a `--set` switch to `ghcup install ghc` to automatically set it as the default after install
* emit warnings when CC/LD is set wrt #82
* add support for version ranges in distro specifiers wrt #84
- e.g. `"(>= 19 && <= 20) || ==0.2.2"` is a valid version key for distro
## 0.1.11 -- 2020-09-23
* Add support for installing haskell-language-server, wrt #65
* When compiling GHC from source create a bindist first, store that bindist in `~/.ghcup/cache` and install it, wrt #51
* Allow compiling over an existing version (`ghcup compile ghc -v 8.6.5 -b 8.6.5`) and replacing it wrt #59
* simplify installing from custom bindist wrt #60
- `ghcup install ghc -u <url> <version>`
* fix bug when cabal isn't marked executable in bindist
* fix bug when `~/.ghcup` is a valid symlink wrt #49
* Drop support for compiling cabal from source (the old bootstrap script is discontinued)
## 0.1.10 -- 2020-08-14
* Show stray Cabals (useful for pre-releases or compiled ones)
## 0.1.9 -- 2020-08-14
* Fix bug when uninstalling all cabal versions
* Fix bug when setting a non-installed ghc version as current default
* Use yaml instead of generated json for download info for ease of adding new GHC versions #44
* Allow pre-release versions of GHC/cabal
* Add XDG dirs support (set `GHCUP_USE_XDG_DIRS`) wrt #39
* Allow specifying a regex for the tarball subdir (e.g. `ghc-.*`)
* Allow installing arbitrary bindists more seamlessly:
- e.g. installing GHC HEAD: `ghcup -n install ghc -u '{"dlHash": "", "dlSubdir": { "RegexDir": "ghc-.*"}, "dlUri": "https://gitlab.haskell.org/api/v4/projects/1/jobs/artifacts/master/raw/ghc-x86_64-fedora27-linux.tar.xz?job=validate-x86_64-linux-fedora27" }' head`
* Avoid duplicate edits to .bashrc/.zshrc wrt #43
## 0.1.8 -- 2020-07-21
* Fix bug in logging thread dying on newlines

View File

@@ -52,8 +52,19 @@ organised tree-ish in `GHCup.Utils` and `GHCup.Utils.*`.
Anything dealing with ghcup specific directories is in
`GHCup.Utils.Dirs`.
Download information on where to fetch bindists from is in
`GHCup.Data.GHCupDownloads`.
Download information on where to fetch bindists from is in the appropriate
yaml files: `ghcup-<yaml-ver>.yaml`.
## Common Tasks
### Adding a new GHC version
1. open the latest `ghcup-<yaml-ver>.yaml`
2. find the latest ghc version (in yaml tree e.g. `ghcupDownloads -> GHC -> 8.10.3`)
3. copy-paste it
4. adjust the version, tags, changelog, source url
5. adjust the various bindist urls (make sure to also change the yaml anchors)
6. run `cabal run exe:ghcup-gen -- check-tarballs -f ghcup-<yaml-ver>.yaml -u 'ghc-8\.10\.4'`
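For orientation, each bindist in the yaml tree bottoms out in a small record of download fields; the sketch below shows such a leaf (the `dlUri`/`dlSubdir`/`dlHash` field names appear in the CHANGELOG's custom-bindist example, while the surrounding per-version and per-platform nesting, tags and yaml anchors should simply be copied from the neighbouring entry rather than typed from scratch):

```yaml
# Illustrative only -- copy the real nesting (tags, changelog, per-arch/per-distro
# keys, yaml anchors) from the neighbouring 8.10.3 entry.  A bindist leaf uses:
dlUri: "https://downloads.haskell.org/~ghc/8.10.4/ghc-8.10.4-x86_64-deb10-linux.tar.xz"
dlSubdir: { RegexDir: "ghc-.*" }
dlHash: "<sha256 of the tarball>"
```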
## Major refactors

View File

@@ -9,10 +9,17 @@ Similar in scope to [rustup](https://github.com/rust-lang-nursery/rustup.rs), [p
## Table of Contents
* [Installation](#installation)
* [Simple bootstrap](#simple-bootstrap)
* [Manual install](#manual-install)
* [Vim integration](#vim-integration)
* [Usage](#usage)
* [Configuration](#configuration)
* [Manpages](#manpages)
* [Shell-completion](#shell-completion)
* [Cross support](#cross-support)
* [XDG support](#xdg-support)
* [Env variables](#env-variables)
* [Installing custom bindists](#installing-custom-bindists)
* [Design goals](#design-goals)
* [How](#how)
* [Known users](#known-users)
@@ -36,6 +43,10 @@ Then adjust your `PATH` in `~/.bashrc` (or similar, depending on your shell) lik
export PATH="$HOME/.cabal/bin:$HOME/.ghcup/bin:$PATH"
```
### Vim integration
See [ghcup.vim](https://github.com/hasufell/ghcup.vim).
## Usage
See `ghcup --help`.
@@ -71,6 +82,13 @@ ghcup upgrade
Generally this is meant to be used with [`cabal-install`](https://hackage.haskell.org/package/cabal-install), which
handles your haskell packages and can demand that [a specific version](https://cabal.readthedocs.io/en/latest/nix-local-build.html#cfg-flag---with-compiler) of `ghc` is available, which `ghcup` can do.
### Configuration
A configuration file can be put in `~/.ghcup/config.yaml`. The default config file
explaining all possible configurations can be found in this repo: [config.yaml](./config.yaml).
Partial configuration is fine. Command line options always override the config file settings.
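For example, a partial config might pin just a couple of settings and leave everything else at its default. The keys below are only illustrative -- the linked [config.yaml](./config.yaml) is the authoritative list:

```yaml
# ~/.ghcup/config.yaml (partial; unspecified keys keep their defaults)
cache: True      # illustrative key names -- check config.yaml in this repo
verbose: False
```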
### Manpages
For man pages to work you need [man-db](http://man-db.nongnu.org/) as your `man` provider, then issue `man ghc`. Manpages only work for the currently set ghc.
@@ -96,6 +114,44 @@ For distributions with non-standard locations of cross toolchain and
libraries, this may need some tweaking of `build.mk` or configure args.
See `ghcup compile ghc --help` for further information.
### XDG support
To enable XDG style directories, set the environment variable `GHCUP_USE_XDG_DIRS` to anything.
You can then control the locations via the following XDG environment variables:
* `XDG_DATA_HOME`: GHCs will be unpacked in `ghcup/ghc` subdir (default: `~/.local/share`)
* `XDG_CACHE_HOME`: logs and download files will be stored in `ghcup` subdir (default: `~/.cache`)
* `XDG_BIN_HOME`: binaries end up here (default: `~/.local/bin`)
* `XDG_CONFIG_HOME`: the config file is stored in `ghcup` subdir as `config.yaml` (default: `~/.config`)
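For instance, to keep everything under `~/.local` (a sketch; `GHCUP_USE_XDG_DIRS` can be set to any value, only its presence matters):

```sh
export GHCUP_USE_XDG_DIRS=yes
export XDG_BIN_HOME="$HOME/.local/bin"   # binaries end up here
ghcup install ghc 8.10.4                 # GHC unpacks under $XDG_DATA_HOME/ghcup/ghc
```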
### Env variables
This is the complete list of env variables that change GHCup behavior:
* `GHCUP_USE_XDG_DIRS`: see [XDG support](#xdg-support) above
* `TMPDIR`: where ghcup does the work (unpacking, building, ...)
* `GHCUP_INSTALL_BASE_PREFIX`: the base of ghcup (default: `$HOME`)
* `GHCUP_CURL_OPTS`: additional options that can be passed to curl
* `GHCUP_WGET_OPTS`: additional options that can be passed to wget
* `CC`/`LD` etc.: full environment is passed to the build system when compiling GHC via GHCup
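As a quick sketch of how these compose on the command line (the paths are just examples):

```sh
# keep the .ghcup tree under /opt/haskell instead of $HOME, do the unpacking
# and building on a roomier filesystem, and pass extra options to curl
GHCUP_INSTALL_BASE_PREFIX=/opt/haskell \
  TMPDIR=/var/tmp \
  GHCUP_CURL_OPTS="--silent" \
  ghcup install ghc 8.10.4
```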
### Installing custom bindists
There are a couple of good use cases to install custom bindists:
1. manually built bindists (e.g. with patches)
- example: `ghcup install ghc -u 'file:///home/mearwald/tmp/ghc-eff-patches/ghc-8.10.2-x86_64-deb10-linux.tar.xz' 8.10.2-eff`
2. GHC head CI bindists
- example: `ghcup install ghc -u 'https://gitlab.haskell.org/api/v4/projects/1/jobs/artifacts/master/raw/ghc-x86_64-fedora27-linux.tar.xz?job=validate-x86_64-linux-fedora27' head`
3. DWARF bindists
- example: `ghcup install ghc -u 'https://downloads.haskell.org/~ghc/8.10.2/ghc-8.10.2-x86_64-deb10-linux-dwarf.tar.xz' 8.10.2-dwarf`
Since the version parser is pretty lax, `8.10.2-eff` and `head` are both valid versions
and produce the binaries `ghc-8.10.2-eff` and `ghc-head` respectively.
GHCup always needs to know which version the bindist corresponds to (this is not automatically
detected).
## Design goals
1. simplicity
@@ -124,10 +180,22 @@ In addition this script can also install `cabal-install`.
## Known users
* Github action [haskell/actions/setup](https://github.com/haskell/actions/tree/main/setup)
* [vabal](https://github.com/Franciman/vabal)
## Known problems
### Custom ghc version names
When installing ghc bindists with custom version names as outlined in
[installing custom bindists](#installing-custom-bindists), cabal might
be unable to find the correct `ghc-pkg` (also see [#73](https://gitlab.haskell.org/haskell/ghcup-hs/-/issues/73))
if you use `cabal build --with-compiler=ghc-foo`. Instead, point it to the full path, such as:
`cabal build --with-compiler=$HOME/.ghcup/ghc/<version-name>/bin/ghc` or set that GHC version
as the current one via: `ghcup set ghc <version-name>`.
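If you'd rather not pass `--with-compiler` on every invocation, the same full path can be pinned in a `cabal.project.local` (a sketch, reusing the `8.10.2-eff` name from the example above):

```
-- cabal.project.local
with-compiler: /home/mearwald/.ghcup/ghc/8.10.2-eff/bin/ghc
```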
This problem doesn't exist for regularly installed GHC versions.
### Limited distributions supported
Currently only GNU/Linux distributions compatible with the [upstream GHC](https://www.haskell.org/ghc/download_ghc_8_6_1.html#binaries) binaries are supported.

View File

@@ -1,19 +1,19 @@
# RELEASING
1. update `GHCup.Version` module. `ghcupURL` must only be updated if we change the `_toolRequirements` type or the JSON representation of it. The version of the json represents the change increments. `ghcUpVer` is the current application version.
1. update `GHCup.Version` module. `ghcupURL` must only be updated if we change the `_toolRequirements` type or the YAML representation of it. The version of the YAML represents the change increments. `ghcUpVer` is the current application version.
2. Update version in ghcup.cabal
3. Add ChangeLog entry
4. Add/fix downloads to `GHCupDownloads` module, then run `ghcup-gen gen` to generate the new json and validate it via `ghcup-gen check`.
4. Add/fix downloads in `ghcup-<ver>.yaml`, then verify with `ghcup-gen check -f ghcup-<ver>.yaml`
5. Commit and git push with tag. Wait for tests to succeed and release artifacts to build.
6. Download release artifacts and upload them to `downloads.haskell.org/ghcup`
7. Add release artifacts to GHCupDownloads (see point 4.)
7. Add release artifacts to yaml file (see point 4.)
8. Upload the final `ghcup-<ver>.json` to `webhost.haskell.org/ghcup/data/`.
8. Upload the final `ghcup-<ver>.yaml` to `webhost.haskell.org/ghcup/data/`.
9. Update bootstrap-haskell and symlinks on `downloads.haskell.org/ghcup`

View File

@@ -10,13 +10,11 @@
module Main where
import GHCup.Data.GHCupInfo
import GHCup.Types
import GHCup.Types.JSON ( )
import GHCup.Utils.Logger
import Data.Aeson ( eitherDecode, encode )
import Data.Aeson.Encode.Pretty
import Data.Char ( toLower )
#if !MIN_VERSION_base(4,13,0)
import Data.Semigroup ( (<>) )
#endif
@@ -24,51 +22,19 @@ import Options.Applicative hiding ( style )
import System.Console.Pretty
import System.Exit
import System.IO ( stdout )
import Text.Regex.Posix
import Validate
import qualified Data.ByteString as B
import qualified Data.ByteString.Lazy as L
import qualified Data.Yaml as Y
data Options = Options
{ optCommand :: Command
}
data Command = GenJSON GenJSONOpts
| ValidateJSON ValidateJSONOpts
| ValidateTarballs ValidateJSONOpts
data Output
= FileOutput FilePath -- optparse-applicative doesn't handle ByteString correctly anyway
| StdOutput
fileOutput :: Parser Output
fileOutput =
FileOutput
<$> (strOption
(long "file" <> short 'f' <> metavar "FILENAME" <> help
"Output to a file"
)
)
stdOutput :: Parser Output
stdOutput = flag'
StdOutput
(short 'o' <> long "stdout" <> help "Print to stdout (default)")
outputP :: Parser Output
outputP = fileOutput <|> stdOutput
data GenJSONOpts = GenJSONOpts
{ output :: Maybe Output
, pretty :: Bool
}
genJSONOpts :: Parser GenJSONOpts
genJSONOpts = GenJSONOpts <$> optional outputP <*> switch
(short 'p' <> long "pretty" <> help "Make JSON output pretty (human readable)"
)
data Command = ValidateYAML ValidateYAMLOpts
| ValidateTarballs ValidateYAMLOpts TarballFilter
data Input
@@ -78,11 +44,10 @@ data Input
fileInput :: Parser Input
fileInput =
FileInput
<$> (strOption
<$> strOption
(long "file" <> short 'f' <> metavar "FILENAME" <> help
"Input file to validate"
)
)
stdInput :: Parser Input
stdInput = flag'
@@ -92,81 +57,73 @@ stdInput = flag'
inputP :: Parser Input
inputP = fileInput <|> stdInput
data ValidateJSONOpts = ValidateJSONOpts
{ input :: Maybe Input
data ValidateYAMLOpts = ValidateYAMLOpts
{ vInput :: Maybe Input
}
validateJSONOpts :: Parser ValidateJSONOpts
validateJSONOpts = ValidateJSONOpts <$> optional inputP
validateYAMLOpts :: Parser ValidateYAMLOpts
validateYAMLOpts = ValidateYAMLOpts <$> optional inputP
tarballFilterP :: Parser TarballFilter
tarballFilterP = option readm $
long "tarball-filter" <> short 'u' <> metavar "<tool>-<version>" <> value def
<> help "Only check certain tarballs (format: <tool>-<version>)"
where
def = TarballFilter Nothing (makeRegex ("" :: String))
readm = do
s <- str
case span (/= '-') s of
(_, []) -> fail "invalid format, missing '-' after the tool name"
(t, v) | [tool] <- [ tool | tool <- [minBound..maxBound], low (show tool) == low t ] ->
pure (TarballFilter $ Just tool) <*> makeRegexOptsM compIgnoreCase execBlank (drop 1 v)
_ -> fail "invalid tool"
low = fmap toLower
opts :: Parser Options
opts = Options <$> com
com :: Parser Command
com = subparser
( (command
"gen"
( GenJSON
<$> (info (genJSONOpts <**> helper)
(progDesc "Generate the json downloads file")
)
)
)
<> (command
( command
"check"
( ValidateJSON
<$> (info (validateJSONOpts <**> helper)
(progDesc "Validate the JSON")
)
( ValidateYAML
<$> info (validateYAMLOpts <**> helper)
(progDesc "Validate the YAML")
)
)
<> (command
<> command
"check-tarballs"
( ValidateTarballs
<$> (info
(validateJSONOpts <**> helper)
(progDesc "Validate all tarballs (download and checksum)")
)
(info
((ValidateTarballs <$> validateYAMLOpts <*> tarballFilterP) <**> helper)
(progDesc "Validate all tarballs (download and checksum)")
)
)
)
main :: IO ()
main = do
customExecParser (prefs showHelpOnError) (info (opts <**> helper) idm)
_ <- customExecParser (prefs showHelpOnError) (info (opts <**> helper) idm)
>>= \Options {..} -> case optCommand of
GenJSON gopts -> do
let bs True =
encodePretty' (defConfig { confIndent = Spaces 2 }) ghcupInfo
bs False = encode ghcupInfo
case gopts of
GenJSONOpts { output = Nothing, pretty } ->
L.hPutStr stdout (bs pretty)
GenJSONOpts { output = Just StdOutput, pretty } ->
L.hPutStr stdout (bs pretty)
GenJSONOpts { output = Just (FileOutput file), pretty } ->
L.writeFile file (bs pretty)
ValidateJSON vopts -> case vopts of
ValidateJSONOpts { input = Nothing } ->
L.getContents >>= valAndExit validate
ValidateJSONOpts { input = Just StdInput } ->
L.getContents >>= valAndExit validate
ValidateJSONOpts { input = Just (FileInput file) } ->
L.readFile file >>= valAndExit validate
ValidateTarballs vopts -> case vopts of
ValidateJSONOpts { input = Nothing } ->
L.getContents >>= valAndExit validateTarballs
ValidateJSONOpts { input = Just StdInput } ->
L.getContents >>= valAndExit validateTarballs
ValidateJSONOpts { input = Just (FileInput file) } ->
L.readFile file >>= valAndExit validateTarballs
ValidateYAML vopts -> case vopts of
ValidateYAMLOpts { vInput = Nothing } ->
B.getContents >>= valAndExit validate
ValidateYAMLOpts { vInput = Just StdInput } ->
B.getContents >>= valAndExit validate
ValidateYAMLOpts { vInput = Just (FileInput file) } ->
B.readFile file >>= valAndExit validate
ValidateTarballs vopts tarballFilter -> case vopts of
ValidateYAMLOpts { vInput = Nothing } ->
B.getContents >>= valAndExit (validateTarballs tarballFilter)
ValidateYAMLOpts { vInput = Just StdInput } ->
B.getContents >>= valAndExit (validateTarballs tarballFilter)
ValidateYAMLOpts { vInput = Just (FileInput file) } ->
B.readFile file >>= valAndExit (validateTarballs tarballFilter)
pure ()
where
valAndExit f contents = do
(GHCupInfo _ av) <- case eitherDecode contents of
(GHCupInfo _ av) <- case Y.decodeEither' contents of
Right r -> pure r
Left e -> die (color Red $ show e)
myLoggerT (LoggerConfig True (B.hPut stdout) (\_ -> pure ())) (f av)
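Hedged example invocations of the reworked `ghcup-gen` CLI above; the file name is one of the YAML files added in this compare, and the version part of `--tarball-filter` is treated as a case-insensitive regex matched against the pretty-printed version:

    ghcup-gen check -f ghcup-0.0.4.yaml
    ghcup-gen check-tarballs -f ghcup-0.0.4.yaml -u 'ghc-8\.10\..*'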


@@ -7,7 +7,10 @@ module Validate where
import GHCup
import GHCup.Download
import GHCup.Types
import GHCup.Types.Optics
import GHCup.Utils.Dirs
import GHCup.Utils.Logger
import GHCup.Utils.Version.QQ
import Control.Exception.Safe
import Control.Monad
@@ -19,6 +22,7 @@ import Control.Monad.Trans.Reader ( runReaderT )
import Control.Monad.Trans.Resource ( runResourceT
, MonadUnliftIO
)
import Data.Containers.ListUtils ( nubOrd )
import Data.IORef
import Data.List
import Data.String.Interpolate
@@ -28,6 +32,8 @@ import Optics
import System.Exit
import System.IO
import Text.ParserCombinators.ReadP
import Text.PrettyPrint.HughesPJClass ( prettyShow )
import Text.Regex.Posix
import qualified Data.ByteString as B
import qualified Data.Map.Strict as M
@@ -53,7 +59,7 @@ validate :: (Monad m, MonadLogger m, MonadThrow m, MonadIO m, MonadUnliftIO m)
validate dls = do
ref <- liftIO $ newIORef 0
-- * verify binary downloads * --
-- verify binary downloads --
flip runReaderT ref $ do
-- unique tags
forM_ (M.toList dls) $ \(t, _) -> checkUniqueTags t
@@ -62,7 +68,7 @@ validate dls = do
forM_ (M.toList dls) $ \(t, versions) ->
forM_ (M.toList versions) $ \(v, vi) ->
forM_ (M.toList $ _viArch vi) $ \(arch, pspecs) -> do
checkHasRequiredPlatforms t v arch (M.keys pspecs)
checkHasRequiredPlatforms t v (_viTags vi) arch (M.keys pspecs)
checkGHCVerIsValid
forM_ (M.toList dls) $ \(t, _) -> checkMandatoryTags t
@@ -76,17 +82,30 @@ validate dls = do
lift $ $(logInfo) [i|All good|]
pure ExitSuccess
where
checkHasRequiredPlatforms t v arch pspecs = do
checkHasRequiredPlatforms t v tags arch pspecs = do
let v' = prettyVer v
when (not $ any (== Linux UnknownLinux) pspecs) $ do
arch' = prettyShow arch
when (notElem (Linux UnknownLinux) pspecs) $ do
lift $ $(logError)
[i|Linux UnknownLinux missing for #{t} #{v'} #{arch}|]
[i|Linux UnknownLinux missing for #{t} #{v'} #{arch'}|]
addError
when ((not $ any (== Darwin) pspecs) && arch == A_64) $ do
lift $ $(logError) [i|Darwin missing for #{t} #{v'} #{arch}|]
when ((notElem Darwin pspecs) && arch == A_64) $ do
lift $ $(logError) [i|Darwin missing for #{t} #{v'} #{arch'}|]
addError
when ((not $ any (== FreeBSD) pspecs) && arch == A_64) $ lift $ $(logWarn)
[i|FreeBSD missing for #{t} #{v'} #{arch}|]
when ((notElem FreeBSD pspecs) && arch == A_64) $ lift $ $(logWarn)
[i|FreeBSD missing for #{t} #{v'} #{arch'}|]
-- alpine needs to be set explicitly, because
-- we cannot assume that "Linux UnknownLinux" runs on Alpine
-- (although it could be static)
when (notElem (Linux Alpine) pspecs) $
case t of
GHCup | arch `elem` [A_64, A_32] -> lift ($(logError) [i|Linux Alpine missing for #{t} #{v'} #{arch}|]) >> addError
Cabal | v > [vver|2.4.1.0|]
, arch `elem` [A_64, A_32] -> lift ($(logError) [i|Linux Alpine missing for #{t} #{v'} #{arch'}|]) >> addError
GHC | Latest `elem` tags || Recommended `elem` tags
, arch `elem` [A_64, A_32] -> lift ($(logError) [i|Linux Alpine missing for #{t} #{v'} #{arch'}|])
_ -> lift $ $(logWarn) [i|Linux Alpine missing for #{t} #{v'} #{arch'}|]
checkUniqueTags tool = do
let allTags = join $ M.elems $ availableToolVersions dls tool
@@ -97,7 +116,7 @@ validate dls = do
(\case
[] -> throwM $ InternalError "empty inner list"
(t : ts) ->
pure $ (t, ) $ if isUniqueTag t then ts == [] else True
pure $ (t, ) (not (isUniqueTag t) || null ts)
)
. group
. sort
@@ -111,6 +130,8 @@ validate dls = do
where
isUniqueTag Latest = True
isUniqueTag Recommended = True
isUniqueTag Old = False
isUniqueTag Prerelease = False
isUniqueTag (Base _) = False
isUniqueTag (UnknownTag _) = False
@@ -144,6 +165,11 @@ validate dls = do
isBase (Base _) = True
isBase _ = False
data TarballFilter = TarballFilter
{ tfTool :: Maybe Tool
, tfVersion :: Regex
}
validateTarballs :: ( Monad m
, MonadLogger m
, MonadThrow m
@@ -151,23 +177,20 @@ validateTarballs :: ( Monad m
, MonadUnliftIO m
, MonadMask m
)
=> GHCupDownloads
=> TarballFilter
-> GHCupDownloads
-> m ExitCode
validateTarballs dls = do
validateTarballs (TarballFilter tool versionRegex) dls = do
ref <- liftIO $ newIORef 0
flip runReaderT ref $ do
-- download/verify all binary tarballs
let
dlbis = nub $ join $ (M.elems dls) <&> \versions ->
join $ (M.elems versions) <&> \vi ->
join $ (M.elems $ _viArch vi) <&> \pspecs ->
join $ (M.elems pspecs) <&> \pverspecs -> (M.elems pverspecs)
forM_ dlbis $ downloadAll
let dlsrc = nub $ join $ (M.elems dls) <&> \versions ->
join $ (M.elems versions) <&> maybe [] (: []) . _viSourceDL
forM_ dlsrc $ downloadAll
-- download/verify all tarballs
let dlis = nubOrd $ dls ^.. each
%& indices (maybe (const True) (==) tool) %> each
%& indices (matchTest versionRegex . T.unpack . prettyVer)
% (viSourceDL % _Just `summing` viArch % each % each % each)
when (null dlis) $ $(logError) [i|no tarballs selected by filter|] *> addError
forM_ dlis downloadAll
-- exit
e <- liftIO $ readIORef ref
@@ -178,12 +201,13 @@ validateTarballs dls = do
pure ExitSuccess
where
runLogger = myLoggerT LoggerConfig { lcPrintDebug = True
, colorOutter = B.hPut stderr
, rawOutter = \_ -> pure ()
}
downloadAll dli = do
let settings = Settings True False Never Curl False
let runLogger = myLoggerT LoggerConfig { lcPrintDebug = True
, colorOutter = B.hPut stderr
, rawOutter = (\_ -> pure ())
}
dirs <- liftIO getDirs
let settings = AppState (Settings True False Never Curl False GHCupURL) dirs defaultKeyBindings
r <-
runLogger


@@ -1,9 +1,11 @@
{-# LANGUAGE CPP #-}
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE QuasiQuotes #-}
{-# LANGUAGE TypeApplications #-}
{-# LANGUAGE TemplateHaskell #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE QuasiQuotes #-}
{-# LANGUAGE TemplateHaskell #-}
{-# LANGUAGE TypeApplications #-}
{-# LANGUAGE ViewPatterns #-}
module BrickMain where
@@ -19,7 +21,10 @@ import Brick
import Brick.Widgets.Border
import Brick.Widgets.Border.Style
import Brick.Widgets.Center
import Brick.Widgets.List
import Brick.Widgets.List ( listSelectedFocusedAttr
, listSelectedAttr
, listAttr
)
#if !defined(TAR)
import Codec.Archive
#endif
@@ -31,59 +36,114 @@ import Data.Bool
import Data.Functor
import Data.List
import Data.Maybe
import Data.Char
import Data.IORef
import Data.String.Interpolate
import Data.Vector ( Vector )
import Data.Vector ( Vector
, (!?)
)
import Data.Versions hiding ( str )
import Haskus.Utils.Variant.Excepts
import Prelude hiding ( appendFile )
import System.Environment
import System.Exit
import System.IO.Unsafe
import Text.PrettyPrint.HughesPJClass ( prettyShow )
import URI.ByteString
import qualified GHCup.Types as GT
import qualified Data.Text as T
import qualified Graphics.Vty as Vty
import qualified Data.Vector as V
data AppState = AppState {
lr :: LR
, dls :: GHCupDownloads
data BrickData = BrickData
{ lr :: [ListResult]
, dls :: GHCupDownloads
, pfreq :: PlatformRequest
}
}
deriving Show
type LR = GenericList String Vector ListResult
data BrickSettings = BrickSettings
{ showAll :: Bool
}
deriving Show
data BrickInternalState = BrickInternalState
{ clr :: Vector ListResult
, ix :: Int
}
deriving Show
data BrickState = BrickState
{ appData :: BrickData
, appSettings :: BrickSettings
, appState :: BrickInternalState
, appKeys :: KeyBindings
}
deriving Show
keyHandlers :: [(Char, String, AppState -> EventM n (Next AppState))]
keyHandlers =
[ ('q', "Quit" , halt)
, ('i', "Install" , withIOAction install')
, ('u', "Uninstall", withIOAction del')
, ('s', "Set" , withIOAction set')
, ('c', "ChangeLog", withIOAction changelog')
]
ui :: AppState -> Widget String
ui AppState {..} =
( padBottom Max
$ ( withBorderStyle unicode
$ borderWithLabel (str "GHCup")
$ (center $ renderList renderItem True lr)
)
keyHandlers :: KeyBindings
-> [ ( Vty.Key
, BrickSettings -> String
, BrickState -> EventM n (Next BrickState)
)
]
keyHandlers KeyBindings {..} =
[ (bQuit, const "Quit" , halt)
, (bInstall, const "Install" , withIOAction install')
, (bUninstall, const "Uninstall", withIOAction del')
, (bSet, const "Set" , withIOAction set')
, (bChangelog, const "ChangeLog", withIOAction changelog')
, ( bShowAll
, \BrickSettings {..} ->
if showAll then "Hide old versions" else "Show all versions"
, hideShowHandler
)
<=> ( withAttr "help"
. txtWrap
. T.pack
. foldr1 (\x y -> x <> " " <> y)
. (++ ["↑↓:Navigation"])
$ (fmap (\(c, s, _) -> (c : ':' : s)) keyHandlers)
)
, (bUp, const "Up", \BrickState {..} -> continue BrickState{ appState = moveCursor 1 appState Up, .. })
, (bDown, const "Down", \BrickState {..} -> continue BrickState{ appState = moveCursor 1 appState Down, .. })
]
where
hideShowHandler BrickState{..} =
let newAppSettings = appSettings { showAll = not . showAll $ appSettings }
newInternalState = constructList appData newAppSettings (Just appState)
in continue (BrickState appData newAppSettings newInternalState appKeys)
showKey :: Vty.Key -> String
showKey (Vty.KChar c) = [c]
showKey Vty.KUp = "↑"
showKey Vty.KDown = "↓"
showKey key = tail (show key)
ui :: AttrMap -> BrickState -> Widget String
ui dimAttrs BrickState{ appSettings = as@BrickSettings{}, ..}
= padBottom Max
( withBorderStyle unicode
$ borderWithLabel (str "GHCup")
(center (header <=> hBorder <=> renderList' appState))
)
<=> footer
where
renderItem b ListResult {..} =
footer =
withAttr "help"
. txtWrap
. T.pack
. foldr1 (\x y -> x <> " " <> y)
. fmap (\(key, s, _) -> showKey key <> ":" <> s as)
$ keyHandlers appKeys
header =
minHSize 2 emptyWidget
<+> padLeft (Pad 2) (minHSize 6 $ str "Tool")
<+> minHSize 15 (str "Version")
<+> padLeft (Pad 1) (minHSize 25 $ str "Tags")
<+> padLeft (Pad 5) (str "Notes")
renderList' = withDefAttr listAttr . drawListElements renderItem True
renderItem _ b listResult@ListResult{..} =
let marks = if
| lSet -> (withAttr "set" $ str "✔✔")
| lInstalled -> (withAttr "installed" $ str "✓ ")
@@ -91,149 +151,300 @@ ui AppState {..} =
ver = case lCross of
Nothing -> T.unpack . prettyVer $ lVer
Just c -> T.unpack (c <> "-" <> prettyVer lVer)
dim = if lNoBindist
then updateAttrMap (const dimAttributes) . withAttr "no-bindist"
else id
in dim
dim
| lNoBindist && not lInstalled
&& not b -- TODO: overloading dim and active ignores active
-- so we hack around it here
= updateAttrMap (const dimAttrs) . withAttr "no-bindist"
| otherwise = id
hooray
| elem Latest lTag && not lInstalled =
withAttr "hooray"
| otherwise = id
active = if b then forceAttr "active" else id
in hooray $ active $ dim
( marks
<+> ( padLeft (Pad 2)
$ minHSize 20
$ ((if b then withAttr "active" else id)
(str $ (fmap toLower . show $ lTool) <> " " <> ver)
)
<+> padLeft (Pad 2)
( minHSize 6
(printTool lTool)
)
<+> minHSize 15 (str ver)
<+> (let l = catMaybes . fmap printTag $ sort lTag
in padLeft (Pad 1) $ minHSize 25 $ if null l
then emptyWidget
else foldr1 (\x y -> x <+> str "," <+> y) l
)
<+> (padLeft (Pad 1) $ if null lTag
then emptyWidget
else
foldr1 (\x y -> x <+> str "," <+> y)
$ (fmap printTag $ sort lTag)
<+> padLeft (Pad 5)
( let notes = printNotes listResult
in if null notes
then emptyWidget
else foldr1 (\x y -> x <+> str "," <+> y) notes
)
<+> vLimit 1 (fill ' ')
)
printTag Recommended = withAttr "recommended" $ str "recommended"
printTag Latest = withAttr "latest" $ str "latest"
printTag (Base pvp'') = str ("base-" ++ T.unpack (prettyPVP pvp''))
printTag (UnknownTag t ) = str t
printTag Recommended = Just $ withAttr "recommended" $ str "recommended"
printTag Latest = Just $ withAttr "latest" $ str "latest"
printTag Prerelease = Just $ withAttr "prerelease" $ str "prerelease"
printTag (Base pvp'') = Just $ str ("base-" ++ T.unpack (prettyPVP pvp''))
printTag Old = Nothing
printTag (UnknownTag t) = Just $ str t
printTool Cabal = str "cabal"
printTool GHC = str "GHC"
printTool GHCup = str "GHCup"
printTool HLS = str "HLS"
printNotes ListResult {..} =
(if hlsPowered then [withAttr "hls-powered" $ str "hls-powered"] else mempty
)
++ (if fromSrc then [withAttr "compiled" $ str "compiled"] else mempty)
++ (if lStray then [withAttr "stray" $ str "stray"] else mempty)
-- | Draws the list elements.
--
-- Evaluates the underlying container up to, and a bit beyond, the
-- selected element. The exact amount depends on available height
-- for drawing and 'listItemHeight'. At most, it will evaluate up to
-- element @(i + h + 1)@ where @i@ is the selected index and @h@ is the
-- available height.
drawListElements :: (Int -> Bool -> ListResult -> Widget String)
-> Bool
-> BrickInternalState
-> Widget String
drawListElements drawElem foc is@(BrickInternalState clr _) =
Widget Greedy Greedy $
let
es = clr
listSelected = fmap fst $ listSelectedElement' is
drawnElements = flip V.imap es $ \i' e ->
let addSeparator w = case es !? (i' - 1) of
Just e' | lTool e' /= lTool e ->
hBorder <=> w
_ -> w
isSelected = Just i' == listSelected
elemWidget = drawElem i' isSelected e
selItemAttr = if foc
then withDefAttr listSelectedFocusedAttr
else withDefAttr listSelectedAttr
makeVisible = if isSelected then visible . selItemAttr else id
in addSeparator $ makeVisible elemWidget
in render
$ viewport "GHCup" Vertical
$ vBox
$ V.toList drawnElements
minHSize :: Int -> Widget n -> Widget n
minHSize s' = hLimit s' . vLimit 1 . (<+> fill ' ')
app :: App AppState e String
app = App { appDraw = \st -> [ui st]
, appHandleEvent = eventHandler
, appStartEvent = return
, appAttrMap = const defaultAttributes
, appChooseCursor = neverShowCursor
}
app :: AttrMap -> AttrMap -> App BrickState e String
app attrs dimAttrs =
App { appDraw = \st -> [ui dimAttrs st]
, appHandleEvent = eventHandler
, appStartEvent = return
, appAttrMap = const attrs
, appChooseCursor = neverShowCursor
}
defaultAttributes :: AttrMap
defaultAttributes = attrMap
defaultAttributes :: Bool -> AttrMap
defaultAttributes no_color = attrMap
Vty.defAttr
[ ("active" , Vty.defAttr `Vty.withBackColor` Vty.blue)
, ("not-installed", Vty.defAttr `Vty.withForeColor` Vty.red)
, ("set" , Vty.defAttr `Vty.withForeColor` Vty.green)
, ("installed" , Vty.defAttr `Vty.withForeColor` Vty.green)
, ("recommended" , Vty.defAttr `Vty.withForeColor` Vty.green)
, ("latest" , Vty.defAttr `Vty.withForeColor` Vty.yellow)
, ("help" , Vty.defAttr `Vty.withStyle` Vty.italic)
[ ("active" , Vty.defAttr `withBackColor` Vty.blue)
, ("not-installed", Vty.defAttr `withForeColor` Vty.red)
, ("set" , Vty.defAttr `withForeColor` Vty.green)
, ("installed" , Vty.defAttr `withForeColor` Vty.green)
, ("recommended" , Vty.defAttr `withForeColor` Vty.green)
, ("hls-powered" , Vty.defAttr `withForeColor` Vty.green)
, ("latest" , Vty.defAttr `withForeColor` Vty.yellow)
, ("prerelease" , Vty.defAttr `withForeColor` Vty.red)
, ("compiled" , Vty.defAttr `withForeColor` Vty.blue)
, ("stray" , Vty.defAttr `withForeColor` Vty.blue)
, ("help" , Vty.defAttr `withStyle` Vty.italic)
, ("hooray" , Vty.defAttr `withForeColor` Vty.brightWhite)
]
where
withForeColor | no_color = const
| otherwise = Vty.withForeColor
withBackColor | no_color = \attr _ -> attr `Vty.withStyle` Vty.reverseVideo
| otherwise = Vty.withBackColor
dimAttributes :: AttrMap
dimAttributes = attrMap
withStyle = Vty.withStyle
dimAttributes :: Bool -> AttrMap
dimAttributes no_color = attrMap
(Vty.defAttr `Vty.withStyle` Vty.dim)
[ ("active" , Vty.defAttr `Vty.withBackColor` Vty.blue)
[ ("active" , Vty.defAttr `withBackColor` Vty.blue) -- has no effect ??
, ("no-bindist", Vty.defAttr `Vty.withStyle` Vty.dim)
]
where
withBackColor | no_color = \attr _ -> attr `Vty.withStyle` Vty.reverseVideo
| otherwise = Vty.withBackColor
eventHandler :: BrickState -> BrickEvent n e -> EventM n (Next BrickState)
eventHandler st@BrickState{..} ev = do
AppState { keyBindings = kb } <- liftIO $ readIORef settings'
case ev of
(MouseDown _ Vty.BScrollUp _ _) ->
continue (BrickState { appState = moveCursor 1 appState Up, .. })
(MouseDown _ Vty.BScrollDown _ _) ->
continue (BrickState { appState = moveCursor 1 appState Down, .. })
(VtyEvent (Vty.EvResize _ _)) -> continue st
(VtyEvent (Vty.EvKey Vty.KUp _)) ->
continue BrickState{ appState = moveCursor 1 appState Up, .. }
(VtyEvent (Vty.EvKey Vty.KDown _)) ->
continue BrickState{ appState = moveCursor 1 appState Down, .. }
(VtyEvent (Vty.EvKey key _)) ->
case find (\(key', _, _) -> key' == key) (keyHandlers kb) of
Nothing -> continue st
Just (_, _, handler) -> handler st
_ -> continue st
eventHandler :: AppState -> BrickEvent n e -> EventM n (Next AppState)
eventHandler st (VtyEvent (Vty.EvResize _ _)) = continue st
eventHandler st (VtyEvent (Vty.EvKey (Vty.KChar 'q') _)) = halt st
eventHandler st (VtyEvent (Vty.EvKey Vty.KEsc _)) = halt st
eventHandler AppState {..} (VtyEvent (Vty.EvKey (Vty.KUp) _)) =
continue (AppState (listMoveUp lr) dls pfreq)
eventHandler AppState {..} (VtyEvent (Vty.EvKey (Vty.KDown) _)) =
continue (AppState (listMoveDown lr) dls pfreq)
eventHandler as (VtyEvent (Vty.EvKey (Vty.KChar c) _)) =
case find (\(c', _, _) -> c' == c) keyHandlers of
Nothing -> continue as
Just (_, _, handler) -> handler as
eventHandler st _ = continue st
moveCursor :: Int -> BrickInternalState -> Direction -> BrickInternalState
moveCursor steps ais@BrickInternalState{..} direction =
let newIx = if direction == Down then ix + steps else ix - steps
in case clr !? newIx of
Just _ -> BrickInternalState { ix = newIx, .. }
Nothing -> ais
-- | Suspend the current UI and run an IO action in terminal. If the
-- IO action returns a Left value, then it's thrown as userError.
withIOAction :: (AppState -> (Int, ListResult) -> IO (Either String a))
-> AppState
-> EventM n (Next AppState)
withIOAction action as = case listSelectedElement (lr as) of
withIOAction :: (BrickState -> (Int, ListResult) -> IO (Either String a))
-> BrickState
-> EventM n (Next BrickState)
withIOAction action as = case listSelectedElement' (appState as) of
Nothing -> continue as
Just (ix, e) -> suspendAndResume $ do
r <- action as (ix, e)
case r of
Left err -> throwIO $ userError err
Right _ -> do
apps <- (fmap . fmap)
(\AppState {..} -> AppState { lr = listMoveTo ix lr, .. })
$ getAppState Nothing (pfreq as)
case apps of
Right nas -> do
putStrLn "Press enter to continue"
_ <- getLine
pure nas
Left err -> throwIO $ userError err
action as (ix, e) >>= \case
Left err -> putStrLn ("Error: " <> err)
Right _ -> putStrLn "Success"
getAppData Nothing (pfreq . appData $ as) >>= \case
Right data' -> do
putStrLn "Press enter to continue"
_ <- getLine
pure (updateList data' as)
Left err -> throwIO $ userError err
install' :: AppState -> (Int, ListResult) -> IO (Either String ())
install' AppState {..} (_, ListResult {..}) = do
-- | Update app data and list internal state based on new evidence.
-- This synchronises @BrickInternalState@ with @BrickData@
-- and @BrickSettings@.
updateList :: BrickData -> BrickState -> BrickState
updateList appD BrickState{..} =
let newInternalState = constructList appD appSettings (Just appState)
in BrickState { appState = newInternalState
, appData = appD
, appSettings = appSettings
, appKeys = appKeys
}
constructList :: BrickData
-> BrickSettings
-> Maybe BrickInternalState
-> BrickInternalState
constructList appD appSettings =
replaceLR (filterVisible (showAll appSettings)) (lr appD)
listSelectedElement' :: BrickInternalState -> Maybe (Int, ListResult)
listSelectedElement' BrickInternalState{..} = fmap (ix, ) $ clr !? ix
selectLatest :: Vector ListResult -> Int
selectLatest v =
case V.findIndex (\ListResult {..} -> lTool == GHC && Latest `elem` lTag) v of
Just ix -> ix
Nothing -> 0
-- | Replace the @appState@ or construct it based on a filter function
-- and a new @[ListResult]@ evidence.
-- When passed an existing @appState@, tries to keep the selected element.
replaceLR :: (ListResult -> Bool)
-> [ListResult]
-> Maybe BrickInternalState
-> BrickInternalState
replaceLR filterF lr s =
let oldElem = s >>= listSelectedElement'
newVec = V.fromList . filter filterF $ lr
newSelected =
case oldElem >>= \(_, oldE) -> V.findIndex (toolEqual oldE) newVec of
Just ix -> ix
Nothing -> selectLatest newVec
in BrickInternalState newVec newSelected
where
toolEqual e1 e2 =
lTool e1 == lTool e2 && lVer e1 == lVer e2 && lCross e1 == lCross e2
filterVisible :: Bool -> ListResult -> Bool
filterVisible showAll e | lInstalled e = True
| showAll = True
| otherwise = not (elem Old (lTag e))
install' :: BrickState -> (Int, ListResult) -> IO (Either String ())
install' BrickState { appData = BrickData {..} } (_, ListResult {..}) = do
settings <- readIORef settings'
l <- readIORef logger'
let runLogger = myLoggerT l
let
run =
runLogger
. flip runReaderT settings
. runResourceT
. runE
@'[AlreadyInstalled
, UnknownArchive
let run =
runLogger
. flip runReaderT settings
. runResourceT
. runE
@'[ AlreadyInstalled
#if !defined(TAR)
, ArchiveResult
, ArchiveResult
#endif
, FileDoesNotExistError
, CopyError
, NoDownload
, NotInstalled
, BuildFailed
, TagNotFound
, DigestError
, DownloadFailed
, NoUpdate]
, UnknownArchive
, FileDoesNotExistError
, CopyError
, NoDownload
, NotInstalled
, BuildFailed
, TagNotFound
, DigestError
, DownloadFailed
, NoUpdate
, TarDirDoesNotExist
]
(run $ do
run (do
case lTool of
GHC -> liftE $ installGHCBin dls lVer pfreq
Cabal -> liftE $ installCabalBin dls lVer pfreq
GHCup -> liftE $ upgradeGHCup dls Nothing False pfreq $> ()
GHC -> do
let vi = getVersionInfo lVer GHC dls
liftE $ installGHCBin dls lVer pfreq $> vi
Cabal -> do
let vi = getVersionInfo lVer Cabal dls
liftE $ installCabalBin dls lVer pfreq $> vi
GHCup -> do
let vi = snd <$> getLatest dls GHCup
liftE $ upgradeGHCup dls Nothing False pfreq $> vi
HLS -> do
let vi = getVersionInfo lVer HLS dls
liftE $ installHLSBin dls lVer pfreq $> vi
)
>>= \case
VRight _ -> pure $ Right ()
VRight vi -> do
forM_ (_viPostInstall =<< vi) $ \msg ->
runLogger $ $(logInfo) msg
pure $ Right ()
VLeft (V (AlreadyInstalled _ _)) -> pure $ Right ()
VLeft (V (BuildFailed _ e)) ->
pure $ Left [i|Build failed with #{e}|]
VLeft (V NoDownload) ->
pure $ Left [i|No available version for #{prettyVer lVer}|]
VLeft (V NoUpdate) -> pure $ Right ()
VLeft e -> pure $ Left [i|#{e}
VLeft e -> pure $ Left [i|#{prettyShow e}
Also check the logs in ~/.ghcup/logs|]
set' :: AppState -> (Int, ListResult) -> IO (Either String ())
set' :: BrickState -> (Int, ListResult) -> IO (Either String ())
set' _ (_, ListResult {..}) = do
settings <- readIORef settings'
l <- readIORef logger'
@@ -242,68 +453,74 @@ set' _ (_, ListResult {..}) = do
let run =
runLogger
. flip runReaderT settings
. runE @'[FileDoesNotExistError, NotInstalled, TagNotFound]
. runE @'[FileDoesNotExistError , NotInstalled , TagNotFound]
(run $ do
run (do
case lTool of
GHC -> liftE $ setGHC (GHCTargetVersion lCross lVer) SetGHCOnly $> ()
Cabal -> liftE $ setCabal lVer $> ()
HLS -> liftE $ setHLS lVer $> ()
GHCup -> pure ()
)
>>= \case
VRight _ -> pure $ Right ()
VLeft e -> pure $ Left [i|#{e}|]
VLeft e -> pure $ Left (prettyShow e)
del' :: AppState -> (Int, ListResult) -> IO (Either String ())
del' _ (_, ListResult {..}) = do
del' :: BrickState -> (Int, ListResult) -> IO (Either String ())
del' BrickState { appData = BrickData {..} } (_, ListResult {..}) = do
settings <- readIORef settings'
l <- readIORef logger'
let runLogger = myLoggerT l
let run = runLogger . flip runReaderT settings . runE @'[NotInstalled]
(run $ do
run (do
let vi = getVersionInfo lVer lTool dls
case lTool of
GHC -> liftE $ rmGHCVer (GHCTargetVersion lCross lVer) $> ()
Cabal -> liftE $ rmCabalVer lVer $> ()
GHCup -> pure ()
GHC -> liftE $ rmGHCVer (GHCTargetVersion lCross lVer) $> vi
Cabal -> liftE $ rmCabalVer lVer $> vi
HLS -> liftE $ rmHLSVer lVer $> vi
GHCup -> pure Nothing
)
>>= \case
VRight _ -> pure $ Right ()
VLeft e -> pure $ Left [i|#{e}|]
VRight vi -> do
forM_ (join $ fmap _viPostRemove vi) $ \msg ->
runLogger $ $(logInfo) msg
pure $ Right ()
VLeft e -> pure $ Left (prettyShow e)
changelog' :: AppState -> (Int, ListResult) -> IO (Either String ())
changelog' AppState {..} (_, ListResult {..}) = do
changelog' :: BrickState -> (Int, ListResult) -> IO (Either String ())
changelog' BrickState { appData = BrickData {..} } (_, ListResult {..}) = do
case getChangeLog dls lTool (Left lVer) of
Nothing -> pure $ Left
[i|Could not find ChangeLog for #{lTool}, version #{prettyVer lVer}|]
Just uri -> do
let cmd = case _rPlatform pfreq of
Darwin -> "open"
Linux _ -> "xdg-open"
FreeBSD -> "xdg-open"
Darwin -> "open"
Linux _ -> "xdg-open"
FreeBSD -> "xdg-open"
exec cmd True [serializeURIRef' uri] Nothing Nothing >>= \case
Right _ -> pure $ Right ()
Left e -> pure $ Left [i|#{e}|]
Left e -> pure $ Left $ prettyShow e
uri' :: IORef (Maybe URI)
{-# NOINLINE uri' #-}
uri' = unsafePerformIO (newIORef Nothing)
settings' :: IORef Settings
settings' :: IORef AppState
{-# NOINLINE settings' #-}
settings' = unsafePerformIO
(newIORef Settings { cache = True
, noVerify = False
, keepDirs = Never
, downloader = Curl
, verbose = False
}
)
settings' = unsafePerformIO $ do
dirs <- getDirs
newIORef $ AppState (Settings { cache = True
, noVerify = False
, keepDirs = Never
, downloader = Curl
, verbose = False
, urlSource = GHCupURL
, ..
})
dirs
defaultKeyBindings
logger' :: IORef LoggerConfig
@@ -316,32 +533,42 @@ logger' = unsafePerformIO
)
brickMain :: Settings -> Maybe URI -> LoggerConfig -> GHCupDownloads -> PlatformRequest -> IO ()
brickMain s muri l av pfreq' = do
writeIORef uri' muri
brickMain :: AppState
-> LoggerConfig
-> GHCupDownloads
-> PlatformRequest
-> IO ()
brickMain s l av pfreq' = do
writeIORef settings' s
-- logger interpreter
writeIORef logger' l
let runLogger = myLoggerT l
eApps <- getAppState (Just av) pfreq'
case eApps of
Right as -> defaultMain app (selectLatest as) $> ()
Left e -> do
no_color <- isJust <$> lookupEnv "NO_COLOR"
eAppData <- getAppData (Just av) pfreq'
case eAppData of
Right ad ->
defaultMain
(app (defaultAttributes no_color) (dimAttributes no_color))
(BrickState ad
defaultAppSettings
(constructList ad defaultAppSettings Nothing)
(keyBindings s)
)
$> ()
Left e -> do
runLogger ($(logError) [i|Error building app state: #{show e}|])
exitWith $ ExitFailure 2
where
selectLatest :: AppState -> AppState
selectLatest AppState {..} =
(\ix -> AppState { lr = listMoveTo ix lr, .. })
. fromJust
. V.findIndex (\ListResult {..} -> lTool == GHC && Latest `elem` lTag)
$ (listElements lr)
getAppState :: Maybe GHCupDownloads -> PlatformRequest -> IO (Either String AppState)
getAppState mg pfreq' = do
muri <- readIORef uri'
defaultAppSettings :: BrickSettings
defaultAppSettings = BrickSettings { showAll = False }
getDownloads' :: IO (Either String GHCupDownloads)
getDownloads' = do
settings <- readIORef settings'
l <- readIORef logger'
let runLogger = myLoggerT l
@@ -349,14 +576,30 @@ getAppState mg pfreq' = do
r <-
runLogger
. flip runReaderT settings
. runE
@'[JSONError, DownloadFailed, FileDoesNotExistError]
$ do
dls <- maybe (fmap _ghcupDownloads $ liftE $ getDownloadsF (maybe GHCupURL OwnSource muri)) pure mg
lV <- lift $ listVersions dls Nothing Nothing pfreq'
pure $ (AppState (list "Tool versions" (V.fromList lV) 1) dls pfreq')
. runE @'[JSONError , DownloadFailed , FileDoesNotExistError]
$ fmap _ghcupDownloads
$ liftE
$ getDownloadsF (urlSource . GT.settings $ settings)
case r of
VRight a -> pure $ Right a
VLeft e -> pure $ Left [i|#{e}|]
VLeft e -> pure $ Left (prettyShow e)
getAppData :: Maybe GHCupDownloads
-> PlatformRequest
-> IO (Either String BrickData)
getAppData mg pfreq' = do
settings <- readIORef settings'
l <- readIORef logger'
let runLogger = myLoggerT l
r <- maybe getDownloads' (pure . Right) mg
runLogger . flip runReaderT settings $ do
case r of
Right dls -> do
lV <- listVersions dls Nothing Nothing pfreq'
pure $ Right $ BrickData (reverse lV) dls pfreq'
Left e -> pure $ Left [i|#{e}|]

File diff suppressed because it is too large


@@ -1,9 +1,31 @@
#!/bin/sh
# Main settings:
# * BOOTSTRAP_HASKELL_NONINTERACTIVE - any nonzero value for noninteractive installation
# * BOOTSTRAP_HASKELL_NO_UPGRADE - any nonzero value to not trigger the upgrade
# * GHCUP_USE_XDG_DIRS - any nonzero value to respect The XDG Base Directory Specification
# * BOOTSTRAP_HASKELL_VERBOSE - any nonzero value for more verbose installation
# * BOOTSTRAP_HASKELL_GHC_VERSION
# * BOOTSTRAP_HASKELL_CABAL_VERSION
# License: LGPL-3.0
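# Example invocation (a sketch; assumes the script was saved locally as
# ./bootstrap-haskell, with the documented defaults made explicit):
#
#   BOOTSTRAP_HASKELL_NONINTERACTIVE=1 \
#   BOOTSTRAP_HASKELL_GHC_VERSION=recommended \
#   BOOTSTRAP_HASKELL_CABAL_VERSION=recommended \
#   sh ./bootstrap-haskell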
# safety subshell to avoid executing anything in case this script is not downloaded properly
(
: "${GHCUP_INSTALL_BASE_PREFIX:=$HOME}"
export GHCUP_USE_XDG_DIRS
if [ -n "${GHCUP_USE_XDG_DIRS}" ] ; then
GHCUP_DIR=${XDG_DATA_HOME:=$HOME/.local/share}/ghcup
GHCUP_BIN=${XDG_BIN_HOME:=$HOME/.local/bin}
else
GHCUP_DIR=${GHCUP_INSTALL_BASE_PREFIX}/.ghcup
GHCUP_BIN=${GHCUP_INSTALL_BASE_PREFIX}/.ghcup/bin
fi
: "${BOOTSTRAP_HASKELL_GHC_VERSION:=recommended}"
: "${BOOTSTRAP_HASKELL_CABAL_VERSION:=recommended}"
@@ -12,8 +34,7 @@ die() {
exit 2
}
edo()
{
edo() {
"$@" || die "\"$*\" failed!"
}
@@ -29,10 +50,26 @@ _eghcup() {
fi
}
_done() {
echo
echo "All done!"
echo
echo "To start a simple repl, run:"
echo " ghci"
echo
echo "To start a new haskell project in the current directory, run:"
echo " cabal init --interactive"
echo
echo "To install other GHC versions, run:"
echo " ghcup tui"
exit 0
}
download_ghcup() {
_plat="$(uname -s)"
_arch=$(uname -m)
_ghver="0.1.8"
_ghver="0.1.14"
_base_url="https://downloads.haskell.org/~ghcup"
case "${_plat}" in
@@ -52,6 +89,12 @@ download_ghcup() {
i*86)
_url=${_base_url}/${_ghver}/i386-linux-ghcup-${_ghver}
;;
armv7*)
_url=${_base_url}/${_ghver}/armv7-linux-ghcup-${_ghver}
;;
aarch64|arm64|armv8l)
_url=${_base_url}/${_ghver}/aarch64-linux-ghcup-${_ghver}
;;
*) die "Unknown architecture: ${_arch}"
;;
esac
@@ -83,15 +126,16 @@ download_ghcup() {
;;
esac
edo curl -Lf "${_url}" > "${GHCUP_INSTALL_BASE_PREFIX}"/.ghcup/bin/ghcup
edo curl -Lf "${_url}" > "${GHCUP_BIN}"/ghcup
edo chmod +x "${GHCUP_INSTALL_BASE_PREFIX}"/.ghcup/bin/ghcup
edo chmod +x "${GHCUP_BIN}"/ghcup
cat <<-EOF > "${GHCUP_INSTALL_BASE_PREFIX}"/.ghcup/env || die "Failed to create env file"
export PATH="\$HOME/.cabal/bin:\${GHCUP_INSTALL_BASE_PREFIX:=\$HOME}/.ghcup/bin:\$PATH"
edo mkdir -p "${GHCUP_DIR}"
cat <<-EOF > "${GHCUP_DIR}"/env || die "Failed to create env file"
export PATH="\$HOME/.cabal/bin:${GHCUP_BIN}:\$PATH"
EOF
# shellcheck disable=SC1090
edo . "${GHCUP_INSTALL_BASE_PREFIX}"/.ghcup/env
edo . "${GHCUP_DIR}"/env
eghcup upgrade
unset _plat _arch _url _ghver _base_url
@@ -102,12 +146,19 @@ echo
echo "Welcome to Haskell!"
echo
echo "This script will download and install the following binaries:"
echo " * ghcup - The Haskell toolchain installer (for managing GHC/cabal versions)"
echo " * ghcup - The Haskell toolchain installer"
echo " (for managing GHC/cabal versions)"
echo " * ghc - The Glasgow Haskell Compiler"
echo " * cabal - The Cabal build tool"
echo
echo "ghcup installs only into the following directory, which can be removed anytime:"
echo " $GHCUP_INSTALL_BASE_PREFIX/.ghcup"
if [ -z "${GHCUP_USE_XDG_DIRS}" ] ; then
echo "ghcup installs only into the following directory,"
echo "which can be removed anytime:"
echo " $GHCUP_INSTALL_BASE_PREFIX/.ghcup"
else
echo "ghcup installs into XDG directories as long as"
echo "'GHCUP_USE_XDG_DIRS' is set."
fi
echo
if [ -z "${BOOTSTRAP_HASKELL_NONINTERACTIVE}" ] ; then
@@ -119,7 +170,7 @@ if [ -z "${BOOTSTRAP_HASKELL_NONINTERACTIVE}" ] ; then
read -r answer </dev/tty
fi
edo mkdir -p "${GHCUP_INSTALL_BASE_PREFIX}"/.ghcup/bin
edo mkdir -p "${GHCUP_BIN}"
if command -V "ghcup" >/dev/null 2>&1 ; then
if [ -z "${BOOTSTRAP_HASKELL_NO_UPGRADE}" ] ; then
@@ -154,9 +205,32 @@ printf "\\033[0;35m%s\\033[0m\\n" ""
printf "\\033[0;35m%s\\033[0m\\n" "Installation done!"
printf "\\033[0;35m%s\\033[0m\\n" ""
if [ -z "${BOOTSTRAP_HASKELL_NONINTERACTIVE}" ] ; then
printf "\\033[0;35m%s\\033[0m\\n" "Do you want to install haskell-language-server (HLS) now?"
printf "\\033[0;35m%s\\033[0m\\n" "HLS is a language-server that provides IDE-like functionality"
printf "\\033[0;35m%s\\033[0m\\n" "and can integrate with different editors, such as Vim, Emacs, VS Code, Atom, ..."
printf "\\033[0;35m%s\\033[0m\\n" "Also see https://github.com/haskell/haskell-language-server/blob/master/README.md"
printf "\\033[0;35m%s\\033[0m\\n" ""
printf "\\033[0;35m%s\\033[0m\\n" "Answer with YES or NO and press ENTER."
printf "\\033[0;35m%s\\033[0m\\n" ""
while true; do
read -r hls_answer </dev/tty
case $hls_answer in
[Yy]*)
eghcup --cache install hls
break ;;
[Nn]*)
break ;;
*)
echo "Please type YES or NO and press enter.";;
esac
done
echo "In order to run ghc and cabal, you need to adjust your PATH variable."
echo "You may want to source '$GHCUP_INSTALL_BASE_PREFIX/.ghcup/env' in your shell"
echo "You may want to source '$GHCUP_DIR/env' in your shell"
echo "configuration to do so (e.g. ~/.bashrc)."
case $SHELL in
@@ -174,13 +248,13 @@ if [ -z "${BOOTSTRAP_HASKELL_NONINTERACTIVE}" ] ; then
GHCUP_PROFILE_FILE="$HOME/.zshrc"
MY_SHELL="zsh"
else
exit 0
_done
fi
;;
*/fish) # login shell is fish
GHCUP_PROFILE_FILE="$HOME/.config/fish/config.fish"
MY_SHELL="fish" ;;
*) exit 0 ;;
*) _done ;;
esac
@@ -198,18 +272,38 @@ if [ -z "${BOOTSTRAP_HASKELL_NONINTERACTIVE}" ] ; then
case $MY_SHELL in
"") break ;;
fish)
echo "set -q GHCUP_INSTALL_BASE_PREFIX[1]; or set GHCUP_INSTALL_BASE_PREFIX \$HOME" >> "${GHCUP_PROFILE_FILE}"
echo "test -f \$GHCUP_INSTALL_BASE_PREFIX/.ghcup/env ; and set -gx PATH \$HOME/.cabal/bin \$GHCUP_INSTALL_BASE_PREFIX/.ghcup/bin \$PATH" >> "${GHCUP_PROFILE_FILE}"
if ! grep -q "ghcup-env" "${GHCUP_PROFILE_FILE}" ; then
mkdir -p "${GHCUP_PROFILE_FILE%/*}"
echo "# ghcup-env" >> "${GHCUP_PROFILE_FILE}"
echo "set -q GHCUP_INSTALL_BASE_PREFIX[1]; or set GHCUP_INSTALL_BASE_PREFIX \$HOME" >> "${GHCUP_PROFILE_FILE}"
echo "test -f $GHCUP_DIR/env ; and set -gx PATH \$HOME/.cabal/bin $GHCUP_BIN \$PATH" >> "${GHCUP_PROFILE_FILE}"
fi
break ;;
*)
echo "[ -f \"\${GHCUP_INSTALL_BASE_PREFIX:=\$HOME}/.ghcup/env\" ] && source \"\${GHCUP_INSTALL_BASE_PREFIX:=\$HOME}/.ghcup/env\"" >> "${GHCUP_PROFILE_FILE}"
bash)
if ! grep -q "ghcup-env" "${GHCUP_PROFILE_FILE}" ; then
echo "[ -f \"${GHCUP_DIR}/env\" ] && source \"${GHCUP_DIR}/env\" # ghcup-env" >> "${GHCUP_PROFILE_FILE}"
fi
case "$(uname -s)" in
"Darwin"|"darwin")
if ! grep -q "ghcup-env" "${HOME}/.bash_profile" ; then
echo "[[ -f ~/.bashrc ]] && source ~/.bashrc # ghcup-env" >> "${HOME}/.bash_profile"
fi
;;
esac
break ;;
zsh)
if ! grep -q "ghcup-env" "${GHCUP_PROFILE_FILE}" ; then
echo "[ -f \"${GHCUP_DIR}/env\" ] && source \"${GHCUP_DIR}/env\" # ghcup-env" >> "${GHCUP_PROFILE_FILE}"
fi
break ;;
esac
printf "\\033[0;35m%s\\033[0m\\n" "OK! ${GHCUP_PROFILE_FILE} has been modified. Restart your terminal for the changes to take effect,"
printf "\\033[0;35m%s\\033[0m\\n" "or type \"source ${GHCUP_INSTALL_BASE_PREFIX}/.ghcup/env\" to apply them in your current terminal session."
exit 0;;
printf "\\033[0;35m%s\\033[0m\\n" "or type \"source ${GHCUP_DIR}/env\" to apply them in your current terminal session."
_done
;;
[Nn]*)
exit 0;;
_done ;;
*)
echo "Please type YES or NO and press enter.";;
esac

20
cabal.ghc8104.project Normal file

@@ -0,0 +1,20 @@
packages: ./ghcup.cabal
with-compiler: ghc-8.10.4
optional-packages: ./3rdparty/*/*.cabal
optimization: 2
package streamly
ghc-options: -O2 -fspec-constr-recursive=16 -fmax-worker-args=16
package ghcup
ghc-options: -O2 -fspec-constr-recursive=16 -fmax-worker-args=16
constraints: http-io-streams -brotli
package libarchive
flags: -system-libarchive
allow-newer: base, ghc-prim, template-haskell


@@ -0,0 +1,261 @@
active-repositories: hackage.haskell.org:merge
constraints: any.Cabal ==3.2.1.0,
any.HUnit ==1.6.2.0,
any.IfElse ==0.85,
any.QuickCheck ==2.14.2,
QuickCheck -old-random +templatehaskell,
any.StateVar ==1.2.1,
any.abstract-deque ==0.3,
abstract-deque -usecas,
any.aeson ==1.5.6.0,
aeson -bytestring-builder -cffi -developer -fast,
any.aeson-pretty ==0.8.8,
aeson-pretty -lib-only,
any.alex ==3.2.6,
alex +small_base,
any.ansi-terminal ==0.11,
ansi-terminal -example,
any.ansi-wl-pprint ==0.6.9,
ansi-wl-pprint -example,
any.array ==0.5.4.0,
any.ascii-string ==1.0.1.4,
any.assoc ==1.0.2,
any.async ==2.2.3,
async -bench,
any.atomic-primops ==0.8.4,
atomic-primops -debug,
any.attoparsec ==0.13.2.5,
attoparsec -developer,
any.auto-update ==0.1.6,
any.base ==4.14.1.0,
any.base-compat ==0.11.2,
any.base-compat-batteries ==0.11.2,
any.base-orphans ==0.8.4,
any.base16-bytestring ==1.0.1.0,
any.base64-bytestring ==1.2.0.1,
any.bifunctors ==5.5.10,
bifunctors +semigroups +tagged,
any.binary ==0.8.8.0,
any.blaze-builder ==0.4.2.1,
any.bytestring ==0.10.12.0,
any.bz2 ==1.0.1.0,
bz2 -cross +with-bzlib,
any.c2hs ==0.28.7,
c2hs +base3 -regression,
any.call-stack ==0.3.0,
any.case-insensitive ==1.2.1.0,
any.casing ==0.1.4.1,
any.cereal ==0.5.8.1,
cereal -bytestring-builder,
any.chs-cabal ==0.1.1.0,
any.chs-deps ==0.1.0.0,
chs-deps -cross,
any.clock ==0.8.2,
clock -llvm,
any.cmdargs ==0.10.21,
cmdargs +quotation -testprog,
any.colour ==2.3.5,
any.comonad ==5.0.8,
comonad +containers +distributive +indexed-traversable,
any.composition-prelude ==3.0.0.2,
composition-prelude -development,
any.concurrent-output ==1.10.12,
any.conduit ==1.3.4.1,
any.conduit-extra ==1.3.5,
any.containers ==0.6.2.1,
any.contravariant ==1.5.3,
contravariant +semigroups +statevar +tagged,
any.cryptohash-sha256 ==0.11.102.0,
cryptohash-sha256 -exe +use-cbits,
any.data-default-class ==0.1.2.0,
any.data-fix ==0.3.1,
any.deepseq ==1.4.4.0,
any.deferred-folds ==0.9.17,
any.directory ==1.3.6.0,
any.distributive ==0.6.2.1,
distributive +semigroups +tagged,
any.dlist ==1.0,
dlist -werror,
any.easy-file ==0.2.2,
any.errors ==2.3.0,
any.exceptions ==0.10.4,
any.fast-logger ==3.0.3,
any.filepath ==1.4.2.1,
any.focus ==1.0.2,
any.foldl ==1.4.11,
any.free ==5.1.6,
any.fusion-plugin-types ==0.1.0,
any.generic-arbitrary ==0.1.0,
any.generics-sop ==0.5.1.1,
any.ghc-boot-th ==8.10.4,
any.ghc-prim ==0.6.1,
ghcup -internal-downloader -tar -tui,
any.happy ==1.20.0,
any.hashable ==1.3.1.0,
hashable +integer-gmp,
any.haskell-src-exts ==1.23.1,
any.haskell-src-meta ==0.8.7,
any.haskus-utils-data ==1.4,
any.haskus-utils-types ==1.5.1,
any.haskus-utils-variant ==3.1,
any.heaps ==0.3.6.1,
any.hpath ==0.11.0,
any.hpath-directory ==0.14.1,
any.hpath-filepath ==0.10.4,
any.hpath-io ==0.14.1,
any.hpath-posix ==0.13.2,
any.hsc2hs ==0.68.7,
hsc2hs -in-ghc-tree,
any.hspec ==2.7.8,
any.hspec-core ==2.7.8,
any.hspec-discover ==2.7.8,
any.hspec-expectations ==0.8.2,
any.hspec-golden-aeson ==0.7.0.0,
any.indexed-profunctors ==0.1,
any.indexed-traversable ==0.1.1,
any.indexed-traversable-instances ==0.1,
any.integer-gmp ==1.0.3.0,
any.integer-logarithms ==1.0.3.1,
integer-logarithms -check-bounds +integer-gmp,
any.language-c ==0.8.3,
language-c -allwarnings +iecfpextension +separatesyb +usebytestrings,
any.libarchive ==3.0.2.1,
libarchive -cross -low-memory -system-libarchive,
any.libyaml ==0.1.2,
libyaml -no-unicode -system-libyaml,
any.lifted-base ==0.2.3.12,
any.list-t ==1.0.4,
any.lockfree-queue ==0.2.3.1,
lzma -static,
any.math-functions ==0.3.4.1,
math-functions +system-erf +system-expm1,
any.megaparsec ==9.0.1,
megaparsec -dev,
any.microlens ==0.4.12.0,
any.microlens-mtl ==0.2.0.1,
any.microlens-th ==0.4.3.9,
any.mmorph ==1.1.5,
any.monad-control ==1.0.2.3,
any.monad-logger ==0.3.36,
monad-logger +template_haskell,
any.monad-loops ==0.4.3,
monad-loops +base4,
any.mono-traversable ==1.0.15.1,
any.mtl ==2.2.2,
any.mwc-random ==0.15.0.1,
any.network ==3.1.2.1,
network -devel,
any.old-locale ==1.0.0.7,
any.old-time ==1.1.0.3,
any.optics ==0.4,
any.optics-core ==0.4,
optics-core -explicit-generic-labels,
any.optics-extra ==0.4,
any.optics-th ==0.4,
any.optics-vl ==0.2.1,
any.optparse-applicative ==0.16.1.0,
optparse-applicative +process,
any.os-release ==1.0.2,
os-release -devel,
any.parallel ==3.2.2.0,
any.parsec ==3.1.14.0,
any.parser-combinators ==1.3.0,
parser-combinators -dev,
any.pretty ==1.1.3.6,
any.pretty-terminal ==0.1.0.0,
any.primitive ==0.7.1.0,
any.primitive-extras ==0.8.2,
any.primitive-unlifted ==0.1.3.0,
any.process ==1.6.9.0,
any.profunctors ==5.6.2,
any.quickcheck-arbitrary-adt ==0.3.1.0,
any.quickcheck-io ==0.2.0,
any.random ==1.2.0,
any.recursion-schemes ==5.2.2,
recursion-schemes +template-haskell,
any.regex-posix ==0.96.0.0,
regex-posix -_regex-posix-clib,
any.resourcet ==1.2.4.2,
any.rts ==1.0,
any.safe ==0.3.19,
any.safe-exceptions ==0.1.7.1,
any.scientific ==0.3.6.2,
scientific -bytestring-builder -integer-simple,
any.semigroupoids ==5.3.5,
semigroupoids +comonad +containers +contravariant +distributive +tagged +unordered-containers,
any.setenv ==0.1.1.3,
any.sop-core ==0.5.0.1,
any.split ==0.2.3.4,
any.splitmix ==0.1.0.3,
splitmix -optimised-mixer,
any.stm ==2.5.0.0,
any.stm-chans ==3.0.0.4,
any.streaming-commons ==0.2.2.1,
streaming-commons -use-bytestring-builder,
any.streamly ==0.7.3,
streamly -debug -dev -examples -examples-sdl -fusion-plugin -has-llvm -inspection -no-fusion -streamk,
any.streamly-bytestring ==0.1.2,
any.streamly-posix ==0.1.0.1,
any.strict ==0.4.0.1,
strict +assoc,
any.strict-base ==0.4.0.0,
any.string-interpolate ==0.3.1.0,
string-interpolate -bytestring-builder -extended-benchmarks -text-builder,
any.syb ==0.7.2.1,
any.tagged ==0.8.6.1,
tagged +deepseq +transformers,
any.tasty ==1.3.1,
tasty +clock,
any.tasty-hunit ==0.10.0.3,
any.tasty-quickcheck ==0.10.1.2,
any.template-haskell ==2.16.0.0,
any.terminal-size ==0.3.2.1,
any.terminfo ==0.4.1.4,
any.text ==1.2.4.1,
any.text-conversions ==0.3.1,
any.text-short ==0.1.3,
text-short -asserts,
any.tf-random ==0.5,
any.th-abstraction ==0.4.2.0,
any.th-compat ==0.1.1,
any.th-lift ==0.8.2,
any.th-lift-instances ==0.1.18,
any.th-orphans ==0.13.11,
any.th-reify-many ==0.1.9,
any.these ==1.1.1.1,
these +assoc,
any.time ==1.9.3,
any.time-compat ==1.9.5,
time-compat -old-locale,
any.transformers ==0.5.6.2,
any.transformers-base ==0.4.5.2,
transformers-base +orphaninstances,
any.transformers-compat ==0.6.6,
transformers-compat -five +five-three -four +generic-deriving +mtl -three -two,
any.typed-process ==0.2.6.0,
any.unbounded-delays ==0.1.1.1,
any.unix ==2.7.2.2,
any.unix-bytestring ==0.3.7.3,
any.unix-compat ==0.5.3,
unix-compat -old-time,
any.unix-time ==0.4.7,
any.unliftio-core ==0.2.0.1,
any.unordered-containers ==0.2.13.0,
unordered-containers -debug,
any.uri-bytestring ==0.3.3.0,
uri-bytestring -lib-werror,
any.utf8-string ==1.0.2,
any.uuid-types ==1.0.4,
any.vector ==0.12.2.0,
vector +boundschecks -internalchecks -unsafechecks -wall,
any.vector-algorithms ==0.8.0.4,
vector-algorithms +bench +boundschecks -internalchecks -llvm +properties -unsafechecks,
any.versions ==4.0.3,
any.vty ==5.33,
any.wcwidth ==0.0.2,
wcwidth -cli +split-base,
any.word8 ==0.1.3,
any.yaml ==0.11.5.0,
yaml +no-examples +no-exe,
zlib -non-blocking-ffi -pkg-config -static
index-state: hackage.haskell.org 2021-03-07T18:36:25Z

20
cabal.ghc884.project Normal file

@@ -0,0 +1,20 @@
packages: ./ghcup.cabal
with-compiler: ghc-8.8.4
optional-packages: ./3rdparty/*/*.cabal
optimization: 2
package streamly
ghc-options: -O2 -fspec-constr-recursive=16 -fmax-worker-args=16
package ghcup
ghc-options: -O2 -fspec-constr-recursive=16 -fmax-worker-args=16
constraints: http-io-streams -brotli
package libarchive
flags: -system-libarchive
allow-newer: base, ghc-prim, template-haskell

262
cabal.ghc884.project.freeze Normal file

@@ -0,0 +1,262 @@
active-repositories: hackage.haskell.org:merge
constraints: any.Cabal ==3.0.1.0,
any.HUnit ==1.6.2.0,
any.IfElse ==0.85,
any.QuickCheck ==2.14.2,
QuickCheck -old-random +templatehaskell,
any.StateVar ==1.2.1,
any.abstract-deque ==0.3,
abstract-deque -usecas,
any.aeson ==1.5.6.0,
aeson -bytestring-builder -cffi -developer -fast,
any.aeson-pretty ==0.8.8,
aeson-pretty -lib-only,
any.alex ==3.2.6,
alex +small_base,
any.ansi-terminal ==0.11,
ansi-terminal -example,
any.ansi-wl-pprint ==0.6.9,
ansi-wl-pprint -example,
any.array ==0.5.4.0,
any.ascii-string ==1.0.1.4,
any.assoc ==1.0.2,
any.async ==2.2.3,
async -bench,
any.atomic-primops ==0.8.4,
atomic-primops -debug,
any.attoparsec ==0.13.2.5,
attoparsec -developer,
any.auto-update ==0.1.6,
any.base ==4.13.0.0,
any.base-compat ==0.11.2,
any.base-compat-batteries ==0.11.2,
any.base-orphans ==0.8.4,
any.base16-bytestring ==1.0.1.0,
any.base64-bytestring ==1.2.0.1,
any.bifunctors ==5.5.10,
bifunctors +semigroups +tagged,
any.binary ==0.8.7.0,
any.blaze-builder ==0.4.2.1,
any.bytestring ==0.10.10.1,
any.bz2 ==1.0.1.0,
bz2 -cross +with-bzlib,
any.c2hs ==0.28.7,
c2hs +base3 -regression,
any.call-stack ==0.3.0,
any.case-insensitive ==1.2.1.0,
any.casing ==0.1.4.1,
any.cereal ==0.5.8.1,
cereal -bytestring-builder,
any.chs-cabal ==0.1.1.0,
any.chs-deps ==0.1.0.0,
chs-deps -cross,
any.clock ==0.8.2,
clock -llvm,
any.cmdargs ==0.10.21,
cmdargs +quotation -testprog,
any.colour ==2.3.5,
any.comonad ==5.0.8,
comonad +containers +distributive +indexed-traversable,
any.composition-prelude ==3.0.0.2,
composition-prelude -development,
any.concurrent-output ==1.10.12,
any.conduit ==1.3.4.1,
any.conduit-extra ==1.3.5,
any.containers ==0.6.2.1,
any.contravariant ==1.5.3,
contravariant +semigroups +statevar +tagged,
any.cryptohash-sha256 ==0.11.102.0,
cryptohash-sha256 -exe +use-cbits,
any.data-default-class ==0.1.2.0,
any.data-fix ==0.3.1,
any.deepseq ==1.4.4.0,
any.deferred-folds ==0.9.17,
any.directory ==1.3.6.0,
any.distributive ==0.6.2.1,
distributive +semigroups +tagged,
any.dlist ==1.0,
dlist -werror,
any.easy-file ==0.2.2,
any.errors ==2.3.0,
any.exceptions ==0.10.4,
exceptions +transformers-0-4,
any.fast-logger ==3.0.3,
any.filepath ==1.4.2.1,
any.focus ==1.0.2,
any.foldl ==1.4.11,
any.free ==5.1.6,
any.fusion-plugin-types ==0.1.0,
any.generic-arbitrary ==0.1.0,
any.generics-sop ==0.5.1.1,
any.ghc-boot-th ==8.8.4,
any.ghc-prim ==0.5.3,
ghcup -internal-downloader -tar -tui,
any.happy ==1.20.0,
any.hashable ==1.3.1.0,
hashable +integer-gmp,
any.haskell-src-exts ==1.23.1,
any.haskell-src-meta ==0.8.7,
any.haskus-utils-data ==1.4,
any.haskus-utils-types ==1.5.1,
any.haskus-utils-variant ==3.1,
any.heaps ==0.3.6.1,
any.hpath ==0.11.0,
any.hpath-directory ==0.14.1,
any.hpath-filepath ==0.10.4,
any.hpath-io ==0.14.1,
any.hpath-posix ==0.13.2,
any.hsc2hs ==0.68.7,
hsc2hs -in-ghc-tree,
any.hspec ==2.7.8,
any.hspec-core ==2.7.8,
any.hspec-discover ==2.7.8,
any.hspec-expectations ==0.8.2,
any.hspec-golden-aeson ==0.7.0.0,
any.indexed-profunctors ==0.1,
any.indexed-traversable ==0.1.1,
any.indexed-traversable-instances ==0.1,
any.integer-gmp ==1.0.2.0,
any.integer-logarithms ==1.0.3.1,
integer-logarithms -check-bounds +integer-gmp,
any.language-c ==0.8.3,
language-c -allwarnings +iecfpextension +separatesyb +usebytestrings,
any.libarchive ==3.0.2.1,
libarchive -cross -low-memory -system-libarchive,
any.libyaml ==0.1.2,
libyaml -no-unicode -system-libyaml,
any.lifted-base ==0.2.3.12,
any.list-t ==1.0.4,
any.lockfree-queue ==0.2.3.1,
lzma -static,
any.math-functions ==0.3.4.1,
math-functions +system-erf +system-expm1,
any.megaparsec ==9.0.1,
megaparsec -dev,
any.microlens ==0.4.12.0,
any.microlens-mtl ==0.2.0.1,
any.microlens-th ==0.4.3.9,
any.mmorph ==1.1.5,
any.monad-control ==1.0.2.3,
any.monad-logger ==0.3.36,
monad-logger +template_haskell,
any.monad-loops ==0.4.3,
monad-loops +base4,
any.mono-traversable ==1.0.15.1,
any.mtl ==2.2.2,
any.mwc-random ==0.15.0.1,
any.network ==3.1.2.1,
network -devel,
any.old-locale ==1.0.0.7,
any.old-time ==1.1.0.3,
any.optics ==0.4,
any.optics-core ==0.4,
optics-core -explicit-generic-labels,
any.optics-extra ==0.4,
any.optics-th ==0.4,
any.optics-vl ==0.2.1,
any.optparse-applicative ==0.16.1.0,
optparse-applicative +process,
any.os-release ==1.0.2,
os-release -devel,
any.parallel ==3.2.2.0,
any.parsec ==3.1.14.0,
any.parser-combinators ==1.3.0,
parser-combinators -dev,
any.pretty ==1.1.3.6,
any.pretty-terminal ==0.1.0.0,
any.primitive ==0.7.1.0,
any.primitive-extras ==0.8.2,
any.primitive-unlifted ==0.1.3.0,
any.process ==1.6.9.0,
any.profunctors ==5.6.2,
any.quickcheck-arbitrary-adt ==0.3.1.0,
any.quickcheck-io ==0.2.0,
any.random ==1.2.0,
any.recursion-schemes ==5.2.2,
recursion-schemes +template-haskell,
any.regex-posix ==0.96.0.0,
regex-posix -_regex-posix-clib,
any.resourcet ==1.2.4.2,
any.rts ==1.0,
any.safe ==0.3.19,
any.safe-exceptions ==0.1.7.1,
any.scientific ==0.3.6.2,
scientific -bytestring-builder -integer-simple,
any.semigroupoids ==5.3.5,
semigroupoids +comonad +containers +contravariant +distributive +tagged +unordered-containers,
any.setenv ==0.1.1.3,
any.sop-core ==0.5.0.1,
any.split ==0.2.3.4,
any.splitmix ==0.1.0.3,
splitmix -optimised-mixer,
any.stm ==2.5.0.0,
any.stm-chans ==3.0.0.4,
any.streaming-commons ==0.2.2.1,
streaming-commons -use-bytestring-builder,
any.streamly ==0.7.3,
streamly -debug -dev -examples -examples-sdl -fusion-plugin -has-llvm -inspection -no-fusion -streamk,
any.streamly-bytestring ==0.1.2,
any.streamly-posix ==0.1.0.1,
any.strict ==0.4.0.1,
strict +assoc,
any.strict-base ==0.4.0.0,
any.string-interpolate ==0.3.1.0,
string-interpolate -bytestring-builder -extended-benchmarks -text-builder,
any.syb ==0.7.2.1,
any.tagged ==0.8.6.1,
tagged +deepseq +transformers,
any.tasty ==1.3.1,
tasty +clock,
any.tasty-hunit ==0.10.0.3,
any.tasty-quickcheck ==0.10.1.2,
any.template-haskell ==2.15.0.0,
any.terminal-size ==0.3.2.1,
any.terminfo ==0.4.1.4,
any.text ==1.2.4.0,
any.text-conversions ==0.3.1,
any.text-short ==0.1.3,
text-short -asserts,
any.tf-random ==0.5,
any.th-abstraction ==0.4.2.0,
any.th-compat ==0.1.1,
any.th-lift ==0.8.2,
any.th-lift-instances ==0.1.18,
any.th-orphans ==0.13.11,
any.th-reify-many ==0.1.9,
any.these ==1.1.1.1,
these +assoc,
any.time ==1.9.3,
any.time-compat ==1.9.5,
time-compat -old-locale,
any.transformers ==0.5.6.2,
any.transformers-base ==0.4.5.2,
transformers-base +orphaninstances,
any.transformers-compat ==0.6.6,
transformers-compat -five +five-three -four +generic-deriving +mtl -three -two,
any.typed-process ==0.2.6.0,
any.unbounded-delays ==0.1.1.1,
any.unix ==2.7.2.2,
any.unix-bytestring ==0.3.7.3,
any.unix-compat ==0.5.3,
unix-compat -old-time,
any.unix-time ==0.4.7,
any.unliftio-core ==0.2.0.1,
any.unordered-containers ==0.2.13.0,
unordered-containers -debug,
any.uri-bytestring ==0.3.3.0,
uri-bytestring -lib-werror,
any.utf8-string ==1.0.2,
any.uuid-types ==1.0.4,
any.vector ==0.12.2.0,
vector +boundschecks -internalchecks -unsafechecks -wall,
any.vector-algorithms ==0.8.0.4,
vector-algorithms +bench +boundschecks -internalchecks -llvm +properties -unsafechecks,
any.versions ==4.0.3,
any.vty ==5.33,
any.wcwidth ==0.0.2,
wcwidth -cli +split-base,
any.word8 ==0.1.3,
any.yaml ==0.11.5.0,
yaml +no-examples +no-exe,
zlib -non-blocking-ffi -pkg-config -static
index-state: hackage.haskell.org 2021-03-07T18:36:25Z


@@ -2,12 +2,6 @@ packages: ./ghcup.cabal
optional-packages: ./3rdparty/*/*.cabal
source-repository-package
type: git
location: https://github.com/haskus/packages.git
tag: 80a1c5fc07f7226c424250ec17f674cd4d618f42
subdir: haskus-utils-types
optimization: 2
package streamly
@@ -15,10 +9,12 @@ package streamly
package ghcup
ghc-options: -O2 -fspec-constr-recursive=16 -fmax-worker-args=16
tests: True
flags: +tui
constraints: http-io-streams -brotli
package libarchive
flags: static
flags: -system-libarchive
allow-newer: base ghc-prim template-haskell
allow-newer: base, ghc-prim, template-haskell

61
config.yaml Normal file

@@ -0,0 +1,61 @@
# Cache downloads in ~/.ghcup/cache
cache: False
# Skip tarball checksum verification
no-verify: False
# enable verbosity
verbose: False
# When to keep build directories
keep-dirs: Errors # Always | Never | Errors
# Which downloader to use
downloader: Curl # Curl | Wget | Internal
# TUI key bindings,
# see https://hackage.haskell.org/package/vty-5.31/docs/Graphics-Vty-Input-Events.html#t:Key
# for possible values.
key-bindings:
up:
KUp: []
down:
KDown: []
quit:
KChar: 'q'
install:
KChar: 'i'
uninstall:
KChar: 'u'
set:
KChar: 's'
changelog:
KChar: 'c'
show-all:
KChar: 'a'
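# As a sketch, any of the bindings above can be remapped to another key from
# the Vty docs linked above, e.g. (hypothetical remapping):
# show-all:
#   KChar: '+'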
# Where to get GHC/cabal/hls download info/versions from. For a more detailed
# explanation, check the 'URLSource' type in the code.
url-source:
## Use the internal download uri, this is the default
GHCupURL: []
## Example 1: Read download info from this location instead
## Accepts file/http/https scheme
# OwnSource: "file:///home/jule/git/ghcup-hs/ghcup-0.0.3.yaml"
## Example 2: Add custom tarballs to the default downloads, overwriting duplicate versions
# AddSource:
# Left:
# toolRequirements: {} # this is ignored
# ghcupDownloads:
# GHC:
# 9.10.2:
# viTags: []
# viArch:
# A_64:
# Linux_UnknownLinux:
# unknown_versioning:
# dlUri: https://downloads.haskell.org/~ghc/7.10.3/ghc-7.10.3-x86_64-deb8-linux.tar.bz2
# dlSubdir: ghc-7.10.3
# dlHash: 01cfbad8dff1e8b34a5fdca8caeaf843b56e36af919e29cd68870d2588563db5
## Example 3: Add a custom download file to the default downloads, overwriting duplicate versions
# AddSource:
# Right: "file:///home/jule/git/ghcup-hs/ghcup-custom.yaml"

File diff suppressed because it is too large

1415
ghcup-0.0.2.yaml Normal file

File diff suppressed because it is too large

1500
ghcup-0.0.3.yaml Normal file

File diff suppressed because it is too large

1753
ghcup-0.0.4.yaml Normal file

File diff suppressed because it is too large


@@ -1,6 +1,6 @@
cabal-version: 3.0
name: ghcup
version: 0.1.8
version: 0.1.14
synopsis: ghc toolchain installer as an exe/library
description:
A rewrite of the shell script ghcup, for providing
@@ -15,7 +15,7 @@ maintainer: hasufell@posteo.de
copyright: Julian Ospald 2020
category: System
build-type: Simple
extra-source-files: CHANGELOG.md
extra-doc-files: CHANGELOG.md
source-repository head
type: git
@@ -52,7 +52,7 @@ common async
build-depends: async >=0.8
common base
build-depends: base >=4.12 && <5
build-depends: base >=4.13 && <5
common base16-bytestring
build-depends: base16-bytestring >= 0.1.1.6
@@ -72,6 +72,9 @@ common bz2
common case-insensitive
build-depends: case-insensitive >=1.2.1.0
common casing
build-depends: casing >=0.1.4.1
common concurrent-output
build-depends: concurrent-output >=1.10.11
@@ -81,6 +84,9 @@ common containers
common cryptohash-sha256
build-depends: cryptohash-sha256 >= 0.11.101.0
common generic-arbitrary
build-depends: generic-arbitrary >=0.1.0
common generics-sop
build-depends: generics-sop >=0.5
@@ -94,13 +100,13 @@ common hpath
build-depends: hpath >=0.11
common hpath-directory
build-depends: hpath-directory >=0.14
build-depends: hpath-directory >=0.14.1
common hpath-filepath
build-depends: hpath-filepath >=0.10.3
common hpath-io
build-depends: hpath-io >=0.14
build-depends: hpath-io >=0.14.1
common hpath-posix
build-depends: hpath-posix >=0.13.2
@@ -108,11 +114,17 @@ common hpath-posix
common http-io-streams
build-depends: http-io-streams >=0.1.2.0
common hspec
build-depends: hspec >=2.7.4
common hspec-golden-aeson
build-depends: hspec-golden-aeson >=0.7
common io-streams
build-depends: io-streams >=1.5
common libarchive
build-depends: libarchive >= 2.2.5.2
build-depends: libarchive >= 3.0.0.0
common lzma
build-depends: lzma >=0.0.0.3
@@ -138,6 +150,9 @@ common optparse-applicative
common parsec
build-depends: parsec >=3.1
common pretty
build-depends: pretty >=1.1.3.1
common pretty-terminal
build-depends: pretty-terminal >=0.1.0.0
@@ -153,8 +168,11 @@ common safe
common safe-exceptions
build-depends: safe-exceptions >=0.1
common split
build-depends: split >=0.2.3.4
common streamly
build-depends: streamly >=0.7.1
build-depends: streamly >=0.7.3
common streamly-posix
build-depends: streamly-posix >=0.1.0.0
@@ -168,9 +186,6 @@ common strict-base
common string-interpolate
build-depends: string-interpolate >=0.2.0.0
common table-layout
build-depends: table-layout >=0.8
common template-haskell
build-depends: template-haskell >=2.7
@@ -192,6 +207,12 @@ common transformers
common os-release
build-depends: os-release >=1.0.0
common QuickCheck
build-depends: QuickCheck >=2.14.1
common quickcheck-arbitrary-adt
build-depends: quickcheck-arbitrary-adt >=0.3.1.0
common unix
build-depends: unix >=2.7
@@ -211,7 +232,7 @@ common vector
build-depends: vector >=0.12
common versions
build-depends: versions >=3.5
build-depends: versions >=4.0.1
common vty
build-depends: vty >=5.28.2
@@ -219,8 +240,11 @@ common vty
common word8
build-depends: word8 >=0.1.3
common yaml
build-depends: yaml >=0.11.4.0
common zlib
build-depends: zlib >=0.6.2.1
build-depends: zlib >=0.6.2.2
common config
default-language: Haskell2010
@@ -234,8 +258,6 @@ common config
PackageImports
RecordWildCards
ScopedTypeVariables
Strict
StrictData
TupleSections
library
@@ -250,6 +272,7 @@ library
, bytestring
, bz2
, case-insensitive
, casing
, concurrent-output
, containers
, cryptohash-sha256
@@ -268,11 +291,13 @@ library
, optics
, optics-vl
, parsec
, pretty
, pretty-terminal
, regex-posix
, resourcet
, safe
, safe-exceptions
, split
, streamly
, streamly-posix
, streamly-bytestring
@@ -290,14 +315,13 @@ library
, utf8-string
, vector
, versions
, vty
, word8
, yaml
, zlib
exposed-modules:
GHCup
GHCup.Data.GHCupDownloads
GHCup.Data.GHCupInfo
GHCup.Data.ToolRequirements
GHCup.Download
GHCup.Download.Utils
GHCup.Errors
@@ -316,7 +340,13 @@ library
GHCup.Utils.Version.QQ
GHCup.Version
-- other-modules:
other-modules:
Paths_ghcup
default-extensions:
Strict
StrictData
-- other-extensions:
hs-source-dirs: lib
@@ -351,12 +381,12 @@ executable ghcup
, monad-logger
, mtl
, optparse-applicative
, pretty
, pretty-terminal
, resourcet
, safe
, safe-exceptions
, string-interpolate
, table-layout
, template-haskell
, text
, uri-bytestring
@@ -372,6 +402,10 @@ executable ghcup
hs-source-dirs: app/ghcup
default-language: Haskell2010
default-extensions:
Strict
StrictData
if flag(internal-downloader)
cpp-options: -DINTERNAL_DOWNLOADER
@@ -403,16 +437,18 @@ executable ghcup-gen
, mtl
, optics
, optparse-applicative
, pretty
, pretty-terminal
, regex-posix
, resourcet
, safe-exceptions
, string-interpolate
, table-layout
, text
, transformers
, uri-bytestring
, utf8-string
, versions
, yaml
--
main-is: Main.hs
@@ -425,8 +461,25 @@ executable ghcup-gen
default-language: Haskell2010
test-suite ghcup-test
default-language: Haskell2010
import:
config
, base
, bytestring
, containers
, QuickCheck
, generic-arbitrary
, hpath
, hspec
, hspec-golden-aeson
, quickcheck-arbitrary-adt
, text
, uri-bytestring
, versions
type: exitcode-stdio-1.0
build-depends: ghcup
hs-source-dirs: test
main-is: MyLibTest.hs
build-depends: base >=4.12.0.0
main-is: Main.hs
other-modules:
GHCup.ArbitraryTypes
GHCup.Types.JSONSpec
Spec

golden/GHCupInfo.json (20178 lines)
File diff suppressed because it is too large.

@@ -1,4 +1,10 @@
cradle:
cabal:
- path: "."
component: "ghcup:lib:ghcup"
- component: "ghcup:lib:ghcup"
path: ./lib
- component: "ghcup:exe:ghcup"
path: ./app/ghcup
- component: "ghcup:exe:ghcup-gen"
path: "./app/ghcup-gen"
- component: "ghcup:test:ghcup-test"
path: ./test

File diff suppressed because it is too large.

File diff suppressed because it is too large.

@@ -1,20 +0,0 @@
{-|
Module : GHCup.Data.GHCupInfo
Description :
Copyright : (c) Julian Ospald, 2020
License : GPL-3
Maintainer : hasufell@hasufell.de
Stability : experimental
Portability : POSIX
-}
module GHCup.Data.GHCupInfo where
import GHCup.Data.GHCupDownloads
import GHCup.Data.ToolRequirements
import GHCup.Types
ghcupInfo :: GHCupInfo
ghcupInfo = GHCupInfo { _toolRequirements = toolRequirements
, _ghcupDownloads = ghcupDownloads
}


@@ -1,156 +0,0 @@
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE QuasiQuotes #-}
{-|
Module : GHCup.Data.ToolRequirements
Description : Tool requirements
Copyright : (c) Julian Ospald, 2020
License : GPL-3
Maintainer : hasufell@hasufell.de
Stability : experimental
Portability : POSIX
-}
module GHCup.Data.ToolRequirements where
import GHCup.Types
import GHCup.Utils.String.QQ
import GHCup.Utils.Version.QQ
import qualified Data.Map as M
-- | Currently 'GHC' is used for both GHC and cabal to simplify
-- this, until we need actual separation.
toolRequirements :: ToolRequirements
toolRequirements = M.fromList
[ ( GHC
, M.fromList
[ ( Nothing
, M.fromList
[ ( Linux UnknownLinux
, M.fromList
[ ( Nothing
, Requirements
[]
[s|You need the following packages: curl g++ gcc gmp make ncurses realpath xz-utils. Consult your distro documentation on the exact names of those packages.|]
)
]
)
, ( Linux Alpine
, M.fromList
[ ( Nothing
, Requirements
[ "curl"
, "gcc"
, "g++"
, "gmp-dev"
, "ncurses-dev"
, "libffi-dev"
, "make"
, "xz"
, "tar"
, "perl"
]
""
)
]
)
, ( Linux Ubuntu
, M.fromList
[ ( Nothing
, Requirements
[ "build-essential"
, "curl"
, "libffi-dev"
, "libffi6"
, "libgmp-dev"
, "libgmp10"
, "libncurses-dev"
, "libncurses5"
, "libtinfo5"
]
""
)
]
)
, ( Linux Debian
, M.fromList
[ ( Nothing
, Requirements
[ "build-essential"
, "curl"
, "libffi-dev"
, "libffi6"
, "libgmp-dev"
, "libgmp10"
, "libncurses-dev"
, "libncurses5"
, "libtinfo5"
]
""
)
]
)
, ( Linux CentOS
, M.fromList
[ ( Nothing
, Requirements
[ "gcc"
, "gcc-c++"
, "gmp"
, "gmp-devel"
, "make"
, "ncurses"
, "ncurses-compat-libs"
, "xz"
, "perl"
]
""
),
( Just [vers|7|]
, Requirements
[ "gcc"
, "gcc-c++"
, "gmp"
, "gmp-devel"
, "make"
, "ncurses"
, "xz"
, "perl"
]
""
)
]
)
, ( Darwin
, M.fromList
[ ( Nothing
, Requirements
[]
"On OS X, in the course of running ghcup you will be given a dialog box to install the command line tools. Accept and the requirements will be installed for you. You will then need to run the command again."
)
]
)
, ( FreeBSD
, M.fromList
[ ( Nothing
, Requirements
[ "curl"
, "gcc"
, "gmp"
, "gmake"
, "ncurses"
, "perl5"
, "libffi"
, "libiconv"
]
""
)
]
)
]
)
]
)
]


@@ -13,7 +13,7 @@
Module : GHCup.Download
Description : Downloading
Copyright : (c) Julian Ospald, 2020
License : GPL-3
License : LGPL-3.0
Maintainer : hasufell@hasufell.de
Stability : experimental
Portability : POSIX
@@ -52,10 +52,12 @@ import Control.Monad.Reader
import Control.Monad.Trans.Resource
hiding ( throwM )
import Data.Aeson
import Data.Bifunctor
import Data.ByteString ( ByteString )
#if defined(INTERNAL_DOWNLOADER)
import Data.CaseInsensitive ( CI )
#endif
import Data.List ( find )
import Data.Maybe
import Data.String.Interpolate
import Data.Time.Clock
@@ -82,12 +84,13 @@ import qualified Crypto.Hash.SHA256 as SHA256
import qualified Data.ByteString as BS
import qualified Data.ByteString.Base16 as B16
import qualified Data.ByteString.Lazy as L
import qualified Data.Map.Strict as M
#if defined(INTERNAL_DOWNLOADER)
import qualified Data.CaseInsensitive as CI
import qualified Data.Map.Strict as M
import qualified Data.Text as T
#endif
import qualified Data.Text.Encoding as E
import qualified Data.Yaml as Y
import qualified System.Posix.Files.ByteString as PF
import qualified System.Posix.RawFilePath.Directory
as RD
@@ -102,8 +105,8 @@ import qualified System.Posix.RawFilePath.Directory
------------------
-- | Like 'getDownloads', but tries to fall back to
-- cached ~/.ghcup/cache/ghcup-<format-ver>.json
-- | Downloads the download information! But only if we need to ;P
getDownloadsF :: ( FromJSONKey Tool
, FromJSONKey Version
, FromJSON VersionInfo
@@ -112,7 +115,7 @@ getDownloadsF :: ( FromJSONKey Tool
, MonadLogger m
, MonadThrow m
, MonadFail m
, MonadReader Settings m
, MonadReader AppState m
)
=> URLSource
-> Excepts
@@ -121,54 +124,58 @@ getDownloadsF :: ( FromJSONKey Tool
GHCupInfo
getDownloadsF urlSource = do
case urlSource of
GHCupURL ->
liftE
$ handleIO (\_ -> readFromCache)
$ catchE @_ @'[JSONError , FileDoesNotExistError]
(\(DownloadFailed _) -> readFromCache)
$ getDownloads urlSource
(OwnSource _) -> liftE $ getDownloads urlSource
(OwnSpec _) -> liftE $ getDownloads urlSource
where
readFromCache = do
lift $ $(logWarn)
[i|Could not get download info, trying cached version (this may not be recent!)|]
let path = view pathL' ghcupURL
cacheDir <- liftIO $ ghcupCacheDir
json_file <- (cacheDir </>) <$> urlBaseName path
bs <-
handleIO' NoSuchThing
(\_ -> throwE $ FileDoesNotExistError (toFilePath json_file))
$ liftIO
$ readFile json_file
lE' JSONDecodeError $ eitherDecode' bs
-- | Downloads the download information! But only if we need to ;P
getDownloads :: ( FromJSONKey Tool
, FromJSONKey Version
, FromJSON VersionInfo
, MonadIO m
, MonadCatch m
, MonadLogger m
, MonadThrow m
, MonadFail m
, MonadReader Settings m
)
=> URLSource
-> Excepts '[JSONError , DownloadFailed] m GHCupInfo
getDownloads urlSource = do
lift $ $(logDebug) [i|Receiving download info from: #{urlSource}|]
case urlSource of
GHCupURL -> do
bs <- reThrowAll DownloadFailed $ smartDl ghcupURL
lE' JSONDecodeError $ eitherDecode' bs
GHCupURL -> liftE getBase
(OwnSource url) -> do
bs <- reThrowAll DownloadFailed $ downloadBS url
lE' JSONDecodeError $ eitherDecode' bs
(OwnSpec av) -> pure $ av
lE' JSONDecodeError $ first show $ Y.decodeEither' (L.toStrict bs)
(OwnSpec av) -> pure av
(AddSource (Left ext)) -> do
base <- liftE getBase
pure (mergeGhcupInfo base ext)
(AddSource (Right uri)) -> do
base <- liftE getBase
bsExt <- reThrowAll DownloadFailed $ downloadBS uri
ext <- lE' JSONDecodeError $ first show $ Y.decodeEither' (L.toStrict bsExt)
pure (mergeGhcupInfo base ext)
where
where
mergeGhcupInfo :: GHCupInfo -- ^ base to merge with
-> GHCupInfo -- ^ extension overwriting the base
-> GHCupInfo
mergeGhcupInfo (GHCupInfo tr base) (GHCupInfo _ ext) =
let new = M.mapWithKey (\k a -> case M.lookup k ext of
Just a' -> M.union a' a
Nothing -> a
) base
in GHCupInfo tr new
readFromCache :: (MonadIO m, MonadCatch m, MonadLogger m, MonadReader AppState m)
=> Excepts '[JSONError, FileDoesNotExistError] m GHCupInfo
readFromCache = do
AppState {dirs = Dirs {..}} <- lift ask
lift $ $(logWarn)
[i|Could not get download info, trying cached version (this may not be recent!)|]
let path = view pathL' ghcupURL
yaml_file <- (cacheDir </>) <$> urlBaseName path
bs <-
handleIO' NoSuchThing
(\_ -> throwE $ FileDoesNotExistError (toFilePath yaml_file))
$ liftIO
$ readFile yaml_file
lE' JSONDecodeError $ first show $ Y.decodeEither' (L.toStrict bs)
getBase :: (MonadFail m, MonadIO m, MonadCatch m, MonadLogger m, MonadReader AppState m)
=> Excepts '[JSONError , FileDoesNotExistError] m GHCupInfo
getBase =
handleIO (\_ -> readFromCache)
$ catchE @_ @'[JSONError, FileDoesNotExistError]
(\(DownloadFailed _) -> readFromCache)
(reThrowAll @_ @_ @'[JSONError, DownloadFailed] DownloadFailed (smartDl ghcupURL)
>>= (liftE . lE' @_ @_ @'[JSONError] JSONDecodeError . first show . Y.decodeEither' . L.toStrict))
where
-- First check if the json file is in the ~/.ghcup/cache dir
-- and check it's access time. If it has been accessed within the
-- last 5 minutes, just reuse it.
@@ -183,7 +190,7 @@ getDownloads urlSource = do
, MonadIO m1
, MonadFail m1
, MonadLogger m1
, MonadReader Settings m1
, MonadReader AppState m1
)
=> URI
-> Excepts
@@ -198,16 +205,16 @@ getDownloads urlSource = do
m1
L.ByteString
smartDl uri' = do
AppState {dirs = Dirs {..}} <- lift ask
let path = view pathL' uri'
cacheDir <- liftIO $ ghcupCacheDir
json_file <- (cacheDir </>) <$> urlBaseName path
e <- liftIO $ doesFileExist json_file
if e
then do
accessTime <-
PF.accessTimeHiRes
<$> (liftIO $ PF.getFileStatus (toFilePath json_file))
currentTime <- liftIO $ getPOSIXTime
<$> liftIO (PF.getFileStatus (toFilePath json_file))
currentTime <- liftIO getPOSIXTime
-- access time won't work on most linuxes, but we can try regardless
if (currentTime - accessTime) > 300
@@ -224,7 +231,7 @@ getDownloads urlSource = do
else -- access in less than 5 minutes, re-use file
liftIO $ readFile json_file
else do
liftIO $ createDirIfMissing newDirPerms cacheDir
liftIO $ createDirRecursive' cacheDir
getModTime >>= \case
Just modTime -> dlWithMod modTime json_file
Nothing -> do
@@ -290,7 +297,8 @@ getDownloadInfo t v (PlatformRequest a p mv) dls = maybe
(case p of
-- non-musl won't work on alpine
Linux Alpine -> with_distro <|> without_distro_ver
_ -> with_distro <|> without_distro_ver <|> without_distro)
_ -> with_distro <|> without_distro_ver <|> without_distro
)
where
with_distro = distro_preview id id
@@ -298,7 +306,18 @@ getDownloadInfo t v (PlatformRequest a p mv) dls = maybe
without_distro = distro_preview (set _Linux UnknownLinux) (const Nothing)
distro_preview f g =
preview (ix t % ix v % viArch % ix a % ix (f p) % ix (g mv)) dls
let platformVersionSpec =
preview (ix t % ix v % viArch % ix a % ix (f p)) dls
mv' = g mv
in fmap snd
. find
(\(mverRange, _) -> maybe
(isNothing mv')
(\range -> maybe False (`versionRange` range) mv')
mverRange
)
. M.toList
=<< platformVersionSpec
-- | Tries to download from the given http or https url
@@ -309,7 +328,7 @@ getDownloadInfo t v (PlatformRequest a p mv) dls = maybe
--
-- The file must not exist.
download :: ( MonadMask m
, MonadReader Settings m
, MonadReader AppState m
, MonadThrow m
, MonadLogger m
, MonadIO m
@@ -328,7 +347,7 @@ download dli dest mfn
scheme = view (dlUri % uriSchemeL' % schemeBSL') dli
cp = do
-- destination dir must exist
liftIO $ hideError AlreadyExists $ createDirRecursive newDirPerms dest
liftIO $ createDirRecursive' dest
destFile <- getDestFile
fromFile <- parseAbs path
liftIO $ copyFile fromFile destFile Strict
@@ -338,7 +357,7 @@ download dli dest mfn
lift $ $(logInfo) [i|downloading: #{uri'}|]
-- destination dir must exist
liftIO $ hideError AlreadyExists $ createDirRecursive newDirPerms dest
liftIO $ createDirRecursive' dest
destFile <- getDestFile
-- download
@@ -346,7 +365,7 @@ download dli dest mfn
(liftIO $ hideError doesNotExistErrorType $ deleteFile destFile)
$ catchAllE @_ @'[ProcessError, DownloadFailed, UnsupportedScheme]
(\e ->
(liftIO $ hideError doesNotExistErrorType $ deleteFile destFile)
liftIO (hideError doesNotExistErrorType $ deleteFile destFile)
>> (throwE . DownloadFailed $ e)
) $ do
lift getDownloader >>= \case
@@ -381,7 +400,7 @@ downloadCached :: ( MonadMask m
, MonadThrow m
, MonadLogger m
, MonadIO m
, MonadReader Settings m
, MonadReader AppState m
)
=> DownloadInfo
-> Maybe (Path Rel) -- ^ optional filename
@@ -390,15 +409,15 @@ downloadCached dli mfn = do
cache <- lift getCache
case cache of
True -> do
cachedir <- liftIO $ ghcupCacheDir
AppState {dirs = Dirs {..}} <- lift ask
fn <- maybe (urlBaseName $ view (dlUri % pathL') dli) pure mfn
let cachfile = cachedir </> fn
let cachfile = cacheDir </> fn
fileExists <- liftIO $ doesFileExist cachfile
if
| fileExists -> do
liftE $ checkDigest dli cachfile
pure $ cachfile
| otherwise -> liftE $ download dli cachedir mfn
pure cachfile
| otherwise -> liftE $ download dli cacheDir mfn
False -> do
tmp <- lift withGHCupTmpDir
liftE $ download dli tmp mfn
@@ -414,7 +433,7 @@ downloadCached dli mfn = do
-- | This is used for downloading the JSON.
downloadBS :: (MonadReader Settings m, MonadCatch m, MonadIO m, MonadLogger m)
downloadBS :: (MonadReader AppState m, MonadCatch m, MonadIO m, MonadLogger m)
=> URI
-> Excepts
'[ FileDoesNotExistError
@@ -434,7 +453,7 @@ downloadBS uri'
= dl False
| scheme == "file"
= liftIOException doesNotExistErrorType (FileDoesNotExistError path)
$ (liftIO $ RD.readFile path)
(liftIO $ RD.readFile path)
| otherwise
= throwE UnsupportedScheme
@@ -471,12 +490,12 @@ downloadBS uri'
#endif
checkDigest :: (MonadIO m, MonadThrow m, MonadLogger m, MonadReader Settings m)
checkDigest :: (MonadIO m, MonadThrow m, MonadLogger m, MonadReader AppState m)
=> DownloadInfo
-> Path Abs
-> Excepts '[DigestError] m ()
checkDigest dli file = do
verify <- lift ask <&> (not . noVerify)
verify <- lift ask <&> (not . noVerify . settings)
when verify $ do
p' <- toFilePath <$> basename file
lift $ $(logInfo) [i|verifying digest of: #{p'}|]
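The AddSource handling added above merges a user-supplied GHCupInfo into the base one per tool with a left-biased Data.Map union, so versions present in the extension override the same versions in the base while everything else from the base is kept. A small standalone illustration of that bias, using plain maps rather than the ghcup types:

import qualified Data.Map.Strict as M

-- The left argument wins on duplicate keys, mirroring `M.union a' a`
-- in mergeGhcupInfo above (extension a', base a).
mergedVersions :: M.Map String String
mergedVersions = M.union
  (M.fromList [("8.10.4", "custom tarball")])                  -- extension
  (M.fromList [("8.10.4", "upstream"), ("8.8.4", "upstream")]) -- base
-- == M.fromList [("8.10.4", "custom tarball"), ("8.8.4", "upstream")]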


@@ -1,10 +1,6 @@
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE QuasiQuotes #-}
{-# LANGUAGE TemplateHaskell #-}
{-# LANGUAGE TypeApplications #-}
{-# LANGUAGE TypeFamilies #-}
@@ -72,7 +68,7 @@ downloadBS' :: MonadIO m
, TooManyRedirs
]
m
(L.ByteString)
L.ByteString
downloadBS' https host path port = do
bref <- liftIO $ newIORef (mempty :: Builder)
let stepper bs = modifyIORef bref (<> byteString bs)
@@ -132,7 +128,7 @@ downloadInternal = go (5 :: Int)
if
| scode >= 200 && scode < 300 -> downloadStream r i' >> pure Nothing
| scode >= 300 && scode < 400 -> case getHeader r "Location" of
Just r' -> pure $ Just $ r'
Just r' -> pure $ Just r'
Nothing -> throwE NoLocationHeader
| otherwise -> throwE $ HTTPStatusError scode
)
@@ -151,7 +147,7 @@ downloadInternal = go (5 :: Int)
Nothing -> 0
mpb <- if progressBar
then Just <$> (liftIO $ newProgressBar defStyle 10 (Progress 0 size ()))
then Just <$> liftIO (newProgressBar defStyle 10 (Progress 0 size ()))
else pure Nothing
outStream <- liftIO $ Streams.makeOutputStream
@@ -224,9 +220,9 @@ headInternal = go (5 :: Int)
if
| scode >= 200 && scode < 300 -> do
let headers = getHeaderMap r
pure $ Right $ headers
pure $ Right headers
| scode >= 300 && scode < 400 -> case getHeader r "Location" of
Just r' -> pure $ Left $ r'
Just r' -> pure $ Left r'
Nothing -> throwE NoLocationHeader
| otherwise -> throwE $ HTTPStatusError scode
)
@@ -243,7 +239,7 @@ withConnection' :: Bool
-> Maybe Int
-> (Connection -> IO a)
-> IO a
withConnection' https host port action = bracket acquire closeConnection action
withConnection' https host port = bracket acquire closeConnection
where
acquire = case https of


@@ -1,10 +1,6 @@
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE QuasiQuotes #-}
{-# LANGUAGE TemplateHaskell #-}
{-# LANGUAGE TypeApplications #-}
{-# LANGUAGE TypeFamilies #-}
@@ -55,7 +51,7 @@ uriToQuadruple URI {..} = do
let queryBS =
BS.intercalate "&"
. fmap (\(x, y) -> encodeQuery x <> "=" <> encodeQuery y)
$ (queryPairs uriQuery)
$ queryPairs uriQuery
port =
preview (_Just % authorityPortL' % _Just % portNumberL') uriAuthority
fullpath = if BS.null queryBS then uriPath else uriPath <> "?" <> queryBS


@@ -1,13 +1,18 @@
{-# OPTIONS_GHC -Wno-orphans #-}
{-# LANGUAGE CPP #-}
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE ExistentialQuantification #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE StandaloneDeriving #-}
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE QuasiQuotes #-}
{-# LANGUAGE TypeOperators #-}
{-# LANGUAGE FlexibleInstances #-}
{-|
Module : GHCup.Errors
Description : GHCup error types
Copyright : (c) Julian Ospald, 2020
License : GPL-3
License : LGPL-3.0
Maintainer : hasufell@hasufell.de
Stability : experimental
Portability : POSIX
@@ -15,13 +20,21 @@ Portability : POSIX
module GHCup.Errors where
import GHCup.Types
import GHCup.Utils.Prelude
#if !defined(TAR)
import Codec.Archive
#endif
import Control.Exception.Safe
import Data.ByteString ( ByteString )
import Data.String.Interpolate
import Data.Text ( Text )
import Data.Versions
import Haskus.Utils.Variant
import HPath
import Haskus.Utils.Variant
import Text.PrettyPrint
import Text.PrettyPrint.HughesPJClass
import URI.ByteString
@@ -35,94 +48,203 @@ import HPath
data NoCompatiblePlatform = NoCompatiblePlatform String -- the platform we got
deriving Show
instance Pretty NoCompatiblePlatform where
pPrint (NoCompatiblePlatform str') =
text ("Could not find a compatible platform. Got: " ++ str')
-- | Unable to find a download for the requested versio/distro.
data NoDownload = NoDownload
deriving Show
instance Pretty NoDownload where
pPrint NoDownload =
text "Unable to find a download for the requested version/distro."
-- | No update available or necessary.
data NoUpdate = NoUpdate
deriving Show
instance Pretty NoUpdate where
pPrint NoUpdate = text "No update available or necessary."
-- | The Architecture is unknown and unsupported.
data NoCompatibleArch = NoCompatibleArch String
deriving Show
instance Pretty NoCompatibleArch where
pPrint (NoCompatibleArch arch) =
text ("The Architecture is unknown or unsupported. Got: " ++ arch)
-- | Unable to figure out the distribution of the host.
data DistroNotFound = DistroNotFound
deriving Show
instance Pretty DistroNotFound where
pPrint DistroNotFound =
text "Unable to figure out the distribution of the host."
-- | The archive format is unknown. We don't know how to extract it.
data UnknownArchive = UnknownArchive ByteString
deriving Show
instance Pretty UnknownArchive where
pPrint (UnknownArchive file) =
text [i|The archive format is unknown. We don't know how to extract the file "#{decUTF8Safe file}"|]
-- | The scheme is not supported (such as ftp).
data UnsupportedScheme = UnsupportedScheme
deriving Show
instance Pretty UnsupportedScheme where
pPrint UnsupportedScheme = text "The scheme is not supported (such as ftp)."
-- | Unable to copy a file.
data CopyError = CopyError String
deriving Show
instance Pretty CopyError where
pPrint (CopyError reason) =
text ("Unable to copy a file. Reason was: " ++ reason)
-- | Unable to find a tag of a tool.
data TagNotFound = TagNotFound Tag Tool
deriving Show
instance Pretty TagNotFound where
pPrint (TagNotFound tag tool) =
text "Unable to find tag" <+> pPrint tag <+> text [i|of tool "#{tool}"|]
-- | Unable to find the next version of a tool (the one after the currently
-- set one).
data NextVerNotFound = NextVerNotFound Tool
deriving Show
instance Pretty NextVerNotFound where
pPrint (NextVerNotFound tool) =
text [i|Unable to find next (the one after the currently set one) version of tool "#{tool}"|]
-- | The tool (such as GHC) is already installed with that version.
data AlreadyInstalled = AlreadyInstalled Tool Version
deriving Show
instance Pretty AlreadyInstalled where
pPrint (AlreadyInstalled tool ver') =
text [i|#{tool}-#{prettyShow ver'} is already installed|]
-- | The tool is not installed. Some operations rely on a tool
-- to be installed (such as setting the current GHC version).
data NotInstalled = NotInstalled Tool Text
data NotInstalled = NotInstalled Tool GHCTargetVersion
deriving Show
instance Pretty NotInstalled where
pPrint (NotInstalled tool ver) =
text [i|The version "#{prettyShow ver}" of the tool "#{tool}" is not installed.|]
-- | An executable was expected to be in PATH, but was not found.
data NotFoundInPATH = NotFoundInPATH (Path Rel)
deriving Show
instance Pretty NotFoundInPATH where
pPrint (NotFoundInPATH exe) =
text [i|The exe "#{decUTF8Safe . toFilePath $ exe}" was not found in PATH.|]
-- | JSON decoding failed.
data JSONError = JSONDecodeError String
deriving Show
instance Pretty JSONError where
pPrint (JSONDecodeError err) =
text [i|JSON decoding failed with: #{err}|]
-- | A file that is supposed to exist does not exist
-- (e.g. when we use file scheme to "download" something).
data FileDoesNotExistError = FileDoesNotExistError ByteString
deriving Show
instance Pretty FileDoesNotExistError where
pPrint (FileDoesNotExistError file) =
text [i|File "#{decUTF8Safe file}" does not exist.|]
data TarDirDoesNotExist = TarDirDoesNotExist TarDir
deriving Show
instance Pretty TarDirDoesNotExist where
pPrint (TarDirDoesNotExist dir) =
text "Tar directory does not exist:" <+> pPrint dir
-- | File digest verification failed.
data DigestError = DigestError Text Text
deriving Show
instance Pretty DigestError where
pPrint (DigestError currentDigest expectedDigest) =
text [i|Digest error: expected "#{expectedDigest}", but got "#{currentDigest}"|]
-- | Unexpected HTTP status.
data HTTPStatusError = HTTPStatusError Int
deriving Show
instance Pretty HTTPStatusError where
pPrint (HTTPStatusError status) =
text [i|Unexpected HTTP status: #{status}|]
-- | The 'Location' header was expected during a 3xx redirect, but not found.
data NoLocationHeader = NoLocationHeader
deriving Show
instance Pretty NoLocationHeader where
pPrint NoLocationHeader =
text [i|The 'Location' header was expected during a 3xx redirect, but not found.|]
-- | Too many redirects.
data TooManyRedirs = TooManyRedirs
deriving Show
instance Pretty TooManyRedirs where
pPrint TooManyRedirs =
text [i|Too many redirections.|]
-- | A patch could not be applied.
data PatchFailed = PatchFailed
deriving Show
instance Pretty PatchFailed where
pPrint PatchFailed =
text [i|A patch could not be applied.|]
-- | The tool requirements could not be found.
data NoToolRequirements = NoToolRequirements
deriving Show
instance Pretty NoToolRequirements where
pPrint NoToolRequirements =
text [i|The Tool requirements could not be found.|]
data InvalidBuildConfig = InvalidBuildConfig Text
deriving Show
instance Pretty InvalidBuildConfig where
pPrint (InvalidBuildConfig reason) =
text [i|The build config is invalid. Reason was: #{reason}|]
data NoToolVersionSet = NoToolVersionSet Tool
deriving Show
instance Pretty NoToolVersionSet where
pPrint (NoToolVersionSet tool) =
text [i|No version is set for tool "#{tool}".|]
-------------------------
--[ High-level errors ]--
-------------------------
-- | A download failed. The underlying error is encapsulated.
data DownloadFailed = forall es . Show (V es) => DownloadFailed (V es)
data DownloadFailed = forall x xs . (Show x, Show (V xs), Pretty x, Pretty (V xs)) => DownloadFailed (V (x ': xs))
instance Pretty DownloadFailed where
pPrint (DownloadFailed reason) =
text "Download failed:" <+> pPrint reason
deriving instance Show DownloadFailed
@@ -130,12 +252,20 @@ deriving instance Show DownloadFailed
-- | A build failed.
data BuildFailed = forall es . Show (V es) => BuildFailed (Path Abs) (V es)
instance Pretty BuildFailed where
pPrint (BuildFailed path reason) =
text [i|BuildFailed failed in dir "#{decUTF8Safe . toFilePath $ path}": #{reason}|]
deriving instance Show BuildFailed
-- | Setting the current GHC version failed.
data GHCupSetError = forall es . Show (V es) => GHCupSetError (V es)
instance Pretty GHCupSetError where
pPrint (GHCupSetError reason) =
text [i|Setting the current GHC version failed: #{reason}|]
deriving instance Show GHCupSetError
@@ -148,4 +278,65 @@ deriving instance Show GHCupSetError
data ParseError = ParseError String
deriving Show
instance Pretty ParseError where
pPrint (ParseError reason) =
text [i|Parsing failed: #{reason}|]
instance Exception ParseError
data UnexpectedListLength = UnexpectedListLength String
deriving Show
instance Pretty UnexpectedListLength where
pPrint (UnexpectedListLength reason) =
text [i|List length unexpected: #{reason}|]
instance Exception UnexpectedListLength
------------------------
--[ orphan instances ]--
------------------------
instance Pretty (V '[]) where
{-# INLINABLE pPrint #-}
pPrint _ = undefined
instance
( Pretty x
, Pretty (V xs)
) => Pretty (V (x ': xs))
where
pPrint v = case popVariantHead v of
Right x -> pPrint x
Left xs -> pPrint xs
instance Pretty URIParseError where
pPrint (MalformedScheme reason) =
text [i|Failed to parse URI. Malformed scheme: #{reason}|]
pPrint MalformedUserInfo =
text [i|Failed to parse URI. Malformed user info.|]
pPrint MalformedQuery =
text [i|Failed to parse URI. Malformed query.|]
pPrint MalformedFragment =
text [i|Failed to parse URI. Malformed fragment.|]
pPrint MalformedHost =
text [i|Failed to parse URI. Malformed host.|]
pPrint MalformedPort =
text [i|Failed to parse URI. Malformed port.|]
pPrint MalformedPath =
text [i|Failed to parse URI. Malformed path.|]
pPrint (OtherError err) =
text [i|Failed to parse URI: #{err}|]
#if !defined(TAR)
instance Pretty ArchiveResult where
pPrint ArchiveFatal = text "Archive result: fatal"
pPrint ArchiveFailed = text "Archive result: failed"
pPrint ArchiveWarn = text "Archive result: warning"
pPrint ArchiveRetry = text "Archive result: retry"
pPrint ArchiveOk = text "Archive result: Ok"
pPrint ArchiveEOF = text "Archive result: EOF"
#endif
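The orphan Pretty instances for variants above peel the head with popVariantHead and recurse on the tail, so whichever concrete error a V currently holds is printed with its own Pretty instance; that is what lets a high-level wrapper such as DownloadFailed render its cause. The same dispatch idea in a self-contained form, written against Either instead of open variants (a sketch, not ghcup code):

import           Text.PrettyPrint                ( Doc )
import           Text.PrettyPrint.HughesPJClass  ( Pretty, pPrint )

-- Print whichever alternative is actually present, using its own Pretty
-- instance; the two-alternative analogue of Pretty (V (x ': xs)).
prettyEither :: (Pretty a, Pretty b) => Either a b -> Doc
prettyEither = either pPrint pPrint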


@@ -10,7 +10,7 @@
Module : GHCup.Platform
Description : Retrieving platform information
Copyright : (c) Julian Ospald, 2020
License : GPL-3
License : LGPL-3.0
Maintainer : hasufell@hasufell.de
Stability : experimental
Portability : POSIX
@@ -92,17 +92,16 @@ getPlatform = do
pure $ PlatformResult { _platform = Linux distro, _distroVersion = ver }
"darwin" -> do
ver <-
( either (const Nothing) Just
either (const Nothing) Just
. versioning
-- TODO: maybe do this somewhere else
. getMajorVersion
. decUTF8Safe
)
<$> getDarwinVersion
<$> getDarwinVersion
pure $ PlatformResult { _platform = Darwin, _distroVersion = ver }
"freebsd" -> do
ver <-
(either (const Nothing) Just . versioning . decUTF8Safe)
either (const Nothing) Just . versioning . decUTF8Safe
<$> getFreeBSDVersion
pure $ PlatformResult { _platform = FreeBSD, _distroVersion = ver }
what -> throwE $ NoCompatiblePlatform what
@@ -157,7 +156,7 @@ getLinuxDistro = do
try_os_release :: IO (Text, Maybe Text)
try_os_release = do
Just (OsRelease { name = name, version_id = version_id }) <-
Just OsRelease{ name = name, version_id = version_id } <-
fmap osRelease <$> parseOsRelease
pure (T.pack name, fmap T.pack version_id)
@@ -174,7 +173,7 @@ getLinuxDistro = do
let nameRegex n =
makeRegexOpts compIgnoreCase
execBlank
(([s|\<|] <> fS n <> [s|\>|] :: ByteString)) :: Regex
([s|\<|] <> fS n <> [s|\>|] :: ByteString) :: Regex
let verRegex =
makeRegexOpts compIgnoreCase
execBlank


@@ -4,7 +4,7 @@
Module : GHCup.Requirements
Description : Requirements utilities
Copyright : (c) Julian Ospald, 2020
License : GPL-3
License : LGPL-3.0
Maintainer : hasufell@hasufell.de
Stability : experimental
Portability : POSIX
@@ -14,8 +14,10 @@ module GHCup.Requirements where
import GHCup.Types
import GHCup.Types.JSON ( )
import GHCup.Types.Optics
import GHCup.Version
import Control.Applicative
import Data.List ( find )
import Data.Maybe
import Optics
import Prelude hiding ( abs
@@ -23,6 +25,7 @@ import Prelude hiding ( abs
, writeFile
)
import qualified Data.Map.Strict as M
import qualified Data.Text as T
@@ -33,22 +36,32 @@ getCommonRequirements :: PlatformResult
-> ToolRequirements
-> Maybe Requirements
getCommonRequirements pr tr =
preview (ix GHC % ix Nothing % ix (_platform pr) % ix (_distroVersion pr)) tr
<|> preview (ix GHC % ix Nothing % ix (_platform pr) % ix Nothing) tr
<|> preview
( ix GHC
% ix Nothing
% ix (set _Linux UnknownLinux $ _platform pr)
% ix Nothing
)
tr
with_distro <|> without_distro_ver <|> without_distro
where
with_distro = distro_preview _platform _distroVersion
without_distro_ver = distro_preview _platform (const Nothing)
without_distro = distro_preview (set _Linux UnknownLinux . _platform) (const Nothing)
distro_preview f g =
let platformVersionSpec =
preview (ix GHC % ix Nothing % ix (f pr)) tr
mv' = g pr
in fmap snd
. find
(\(mverRange, _) -> maybe
(isNothing mv')
(\range -> maybe False (`versionRange` range) mv')
mverRange
)
. M.toList
=<< platformVersionSpec
prettyRequirements :: Requirements -> T.Text
prettyRequirements Requirements {..} =
let d = if not . null $ _distroPKGs
then
"\n Install the following distro packages: "
"\n Please install the following distro packages: "
<> T.intercalate " " _distroPKGs
else ""
n = if not . T.null $ _notes then "\n Note: " <> _notes else ""
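prettyRequirements above turns a Requirements record (distro package list plus a free-form note) into the hint the installer prints. A rough usage sketch with values borrowed from the deleted Debian entry in GHCup.Data.ToolRequirements; the exact leading whitespace is approximate:

-- ghci sketch (illustrative, requires OverloadedStrings):
-- >>> prettyRequirements (Requirements ["build-essential", "curl", "libgmp-dev"] "")
-- "\n Please install the following distro packages: build-essential curl libgmp-dev"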


@@ -1,12 +1,14 @@
{-# OPTIONS_GHC -Wno-orphans #-}
{-# LANGUAGE CPP #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE QuasiQuotes #-}
{-|
Module : GHCup.Types
Description : GHCup types
Copyright : (c) Julian Ospald, 2020
License : GPL-3
License : LGPL-3.0
Maintainer : hasufell@hasufell.de
Stability : experimental
Portability : POSIX
@@ -14,12 +16,19 @@ Portability : POSIX
module GHCup.Types where
import Data.Map.Strict ( Map )
import Data.List.NonEmpty ( NonEmpty (..) )
import Data.String.Interpolate
import Data.Text ( Text )
import Data.Versions
import HPath
import Text.PrettyPrint.HughesPJClass (Pretty, pPrint, text)
import URI.ByteString
import qualified Data.Text as T
import qualified Data.Text.Encoding as E
import qualified Data.Text.Encoding.Error as E
import qualified GHC.Generics as GHC
import qualified Graphics.Vty as Vty
@@ -44,7 +53,7 @@ data GHCupInfo = GHCupInfo
type ToolRequirements = Map Tool ToolReqVersionSpec
type ToolReqVersionSpec = Map (Maybe Version) PlatformReqSpec
type PlatformReqSpec = Map Platform PlatformReqVersionSpec
type PlatformReqVersionSpec = Map (Maybe Versioning) Requirements
type PlatformReqVersionSpec = Map (Maybe VersionRange) Requirements
data Requirements = Requirements
@@ -68,34 +77,56 @@ type GHCupDownloads = Map Tool ToolVersionSpec
type ToolVersionSpec = Map Version VersionInfo
type ArchitectureSpec = Map Architecture PlatformSpec
type PlatformSpec = Map Platform PlatformVersionSpec
type PlatformVersionSpec = Map (Maybe Versioning) DownloadInfo
type PlatformVersionSpec = Map (Maybe VersionRange) DownloadInfo
-- | An installable tool.
data Tool = GHC
| Cabal
| GHCup
deriving (Eq, GHC.Generic, Ord, Show)
| HLS
deriving (Eq, GHC.Generic, Ord, Show, Enum, Bounded)
-- | All necessary information of a tool version, including
-- source download and per-architecture downloads.
data VersionInfo = VersionInfo
{ _viTags :: [Tag] -- ^ version specific tag
, _viChangeLog :: Maybe URI
, _viSourceDL :: Maybe DownloadInfo -- ^ source tarball
, _viArch :: ArchitectureSpec -- ^ descend for binary downloads per arch
{ _viTags :: [Tag] -- ^ version specific tag
, _viChangeLog :: Maybe URI
, _viSourceDL :: Maybe DownloadInfo -- ^ source tarball
, _viArch :: ArchitectureSpec -- ^ descend for binary downloads per arch
-- informative messages
, _viPostInstall :: Maybe Text
, _viPostRemove :: Maybe Text
, _viPreCompile :: Maybe Text
}
deriving (Eq, Show)
deriving (Eq, GHC.Generic, Show)
-- | A tag. These are currently attached to a version of a tool.
data Tag = Latest
| Recommended
| Prerelease
| Base PVP
| Old -- ^ old version are hidden by default in TUI
| UnknownTag String -- ^ used for upwardscompat
deriving (Ord, Eq, Show) -- FIXME: manual JSON instance
deriving (Ord, Eq, GHC.Generic, Show) -- FIXME: manual JSON instance
tagToString :: Tag -> String
tagToString Recommended = "recommended"
tagToString Latest = "latest"
tagToString Prerelease = "prerelease"
tagToString (Base pvp'') = "base-" ++ T.unpack (prettyPVP pvp'')
tagToString (UnknownTag t ) = t
tagToString Old = ""
instance Pretty Tag where
pPrint Recommended = text "recommended"
pPrint Latest = text "latest"
pPrint Prerelease = text "prerelease"
pPrint (Base pvp'') = text ("base-" ++ T.unpack (prettyPVP pvp''))
pPrint (UnknownTag t ) = text t
pPrint Old = mempty
data Architecture = A_64
| A_32
@@ -107,6 +138,18 @@ data Architecture = A_64
| A_ARM64
deriving (Eq, GHC.Generic, Ord, Show)
archToString :: Architecture -> String
archToString A_64 = "x86_64"
archToString A_32 = "i386"
archToString A_PowerPC = "powerpc"
archToString A_PowerPC64 = "powerpc64"
archToString A_Sparc = "sparc"
archToString A_Sparc64 = "sparc64"
archToString A_ARM = "arm"
archToString A_ARM64 = "aarch64"
instance Pretty Architecture where
pPrint = text . archToString
data Platform = Linux LinuxDistro
-- ^ must exit
@@ -115,6 +158,14 @@ data Platform = Linux LinuxDistro
| FreeBSD
deriving (Eq, GHC.Generic, Ord, Show)
platformToString :: Platform -> String
platformToString (Linux distro) = "linux-" ++ distroToString distro
platformToString Darwin = "darwin"
platformToString FreeBSD = "freebsd"
instance Pretty Platform where
pPrint = text . platformToString
data LinuxDistro = Debian
| Ubuntu
| Mint
@@ -131,15 +182,31 @@ data LinuxDistro = Debian
-- ^ must exit
deriving (Eq, GHC.Generic, Ord, Show)
distroToString :: LinuxDistro -> String
distroToString Debian = "debian"
distroToString Ubuntu = "ubuntu"
distroToString Mint= "mint"
distroToString Fedora = "fedora"
distroToString CentOS = "centos"
distroToString RedHat = "redhat"
distroToString Alpine = "alpine"
distroToString AmazonLinux = "amazon"
distroToString Gentoo = "gentoo"
distroToString Exherbo = "exherbo"
distroToString UnknownLinux = "unknown"
instance Pretty LinuxDistro where
pPrint = text . distroToString
-- | An encapsulation of a download. This can be used
-- to download, extract and install a tool.
data DownloadInfo = DownloadInfo
{ _dlUri :: URI
, _dlSubdir :: Maybe (Path Rel)
, _dlSubdir :: Maybe TarDir
, _dlHash :: Text
}
deriving (Eq, Show)
deriving (Eq, Ord, GHC.Generic, Show)
@@ -149,23 +216,99 @@ data DownloadInfo = DownloadInfo
--------------
-- | How to descend into a tar archive.
data TarDir = RealDir (Path Rel)
| RegexDir String -- ^ will be compiled to regex, the first match will "win"
deriving (Eq, Ord, GHC.Generic, Show)
instance Pretty TarDir where
pPrint (RealDir path) = text [i|#{E.decodeUtf8With E.lenientDecode . toFilePath $ path}|]
pPrint (RegexDir regex) = text regex
-- | Where to fetch GHCupDownloads from.
data URLSource = GHCupURL
| OwnSource URI
| OwnSpec GHCupInfo
deriving Show
| AddSource (Either GHCupInfo URI) -- ^ merge with GHCupURL
deriving (GHC.Generic, Show)
data UserSettings = UserSettings
{ uCache :: Maybe Bool
, uNoVerify :: Maybe Bool
, uVerbose :: Maybe Bool
, uKeepDirs :: Maybe KeepDirs
, uDownloader :: Maybe Downloader
, uKeyBindings :: Maybe UserKeyBindings
, uUrlSource :: Maybe URLSource
}
deriving (Show, GHC.Generic)
defaultUserSettings :: UserSettings
defaultUserSettings = UserSettings Nothing Nothing Nothing Nothing Nothing Nothing Nothing
data UserKeyBindings = UserKeyBindings
{ kUp :: Maybe Vty.Key
, kDown :: Maybe Vty.Key
, kQuit :: Maybe Vty.Key
, kInstall :: Maybe Vty.Key
, kUninstall :: Maybe Vty.Key
, kSet :: Maybe Vty.Key
, kChangelog :: Maybe Vty.Key
, kShowAll :: Maybe Vty.Key
}
deriving (Show, GHC.Generic)
data KeyBindings = KeyBindings
{ bUp :: Vty.Key
, bDown :: Vty.Key
, bQuit :: Vty.Key
, bInstall :: Vty.Key
, bUninstall :: Vty.Key
, bSet :: Vty.Key
, bChangelog :: Vty.Key
, bShowAll :: Vty.Key
}
deriving (Show, GHC.Generic)
defaultKeyBindings :: KeyBindings
defaultKeyBindings = KeyBindings
{ bUp = Vty.KUp
, bDown = Vty.KDown
, bQuit = Vty.KChar 'q'
, bInstall = Vty.KChar 'i'
, bUninstall = Vty.KChar 'u'
, bSet = Vty.KChar 's'
, bChangelog = Vty.KChar 'c'
, bShowAll = Vty.KChar 'a'
}
data AppState = AppState
{ settings :: Settings
, dirs :: Dirs
, keyBindings :: KeyBindings
} deriving (Show)
data Settings = Settings
{ cache :: Bool
, noVerify :: Bool
, keepDirs :: KeepDirs
, downloader :: Downloader
, verbose :: Bool
, urlSource :: URLSource
}
deriving (Show, GHC.Generic)
data Dirs = Dirs
{ baseDir :: Path Abs
, binDir :: Path Abs
, cacheDir :: Path Abs
, logsDir :: Path Abs
, confDir :: Path Abs
}
deriving Show
data KeepDirs = Always
| Errors
| Never
@@ -201,6 +344,15 @@ data PlatformResult = PlatformResult
}
deriving (Eq, Show)
platResToString :: PlatformResult -> String
platResToString PlatformResult { _platform = plat, _distroVersion = Just v' }
= show plat <> ", " <> T.unpack (prettyV v')
platResToString PlatformResult { _platform = plat, _distroVersion = Nothing }
= show plat
instance Pretty PlatformResult where
pPrint = text . platResToString
data PlatformRequest = PlatformRequest
{ _rArch :: Architecture
, _rPlatform :: Platform
@@ -208,6 +360,16 @@ data PlatformRequest = PlatformRequest
}
deriving (Eq, Show)
pfReqToString :: PlatformRequest -> String
pfReqToString (PlatformRequest arch plat ver) =
archToString arch ++ "-" ++ platformToString plat ++ pver
where
pver = case ver of
Just v' -> "-" ++ T.unpack (prettyV v')
Nothing -> ""
instance Pretty PlatformRequest where
pPrint = text . pfReqToString
-- | A GHC identified by the target platform triple
-- and the version.
@@ -221,9 +383,33 @@ data GHCTargetVersion = GHCTargetVersion
mkTVer :: Version -> GHCTargetVersion
mkTVer = GHCTargetVersion Nothing
tVerToText :: GHCTargetVersion -> Text
tVerToText (GHCTargetVersion (Just t) v') = t <> "-" <> prettyVer v'
tVerToText (GHCTargetVersion Nothing v') = prettyVer v'
-- | Assembles a path of the form: <target-triple>-<version>
prettyTVer :: GHCTargetVersion -> Text
prettyTVer (GHCTargetVersion (Just t) v') = t <> "-" <> prettyVer v'
prettyTVer (GHCTargetVersion Nothing v') = prettyVer v'
instance Pretty GHCTargetVersion where
pPrint = text . T.unpack . tVerToText
-- | A comparator and a version.
data VersionCmp = VR_gt Versioning
| VR_gteq Versioning
| VR_lt Versioning
| VR_lteq Versioning
| VR_eq Versioning
deriving (Eq, GHC.Generic, Ord, Show)
-- | A version range. Supports && and ||, but not arbitrary
-- combinations. This is a little simplified.
data VersionRange = SimpleRange (NonEmpty VersionCmp) -- And
| OrRange (NonEmpty VersionCmp) VersionRange
deriving (Eq, GHC.Generic, Ord, Show)
instance Pretty Versioning where
pPrint = text . T.unpack . prettyV
instance Pretty Version where
pPrint = text . T.unpack . prettyVer
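The VersionCmp/VersionRange types above replace the plain Maybe Versioning keys used so far for per-distro-version downloads and requirements. Their textual form, parsed by versionRangeP and rendered by verRangeToText in GHCup/Types/JSON.hs below, maps onto the constructors roughly as follows (the v* version values are illustrative placeholders):

-- ">= 8.10"                ~ SimpleRange (VR_gteq v810 :| [])
-- "( >= 8.2 && < 9 )"      ~ SimpleRange (VR_gteq v82 :| [VR_lt v9])
-- "( < 7 ) || ( >= 8.0 )"  ~ OrRange (VR_lt v7 :| []) (SimpleRange (VR_gteq v80 :| []))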


@@ -14,7 +14,7 @@
Module : GHCup.Types.JSON
Description : GHCup JSON types/instances
Copyright : (c) Julian Ospald, 2020
License : GPL-3
License : LGPL-3.0
Maintainer : hasufell@hasufell.de
Stability : experimental
Portability : POSIX
@@ -22,24 +22,34 @@ Portability : POSIX
module GHCup.Types.JSON where
import GHCup.Types
import GHCup.Utils.MegaParsec
import GHCup.Utils.Prelude
import Control.Applicative ( (<|>) )
import Data.Aeson
import Data.Aeson.TH
import Data.Aeson.Types
import Data.List.NonEmpty ( NonEmpty(..) )
import Data.Text.Encoding as E
import Data.Versions
import Data.Void
import Data.Word8
import HPath
import URI.ByteString
import Text.Casing
import qualified Data.ByteString as BS
import qualified Data.List.NonEmpty as NE
import qualified Data.Text as T
import qualified Graphics.Vty as Vty
import qualified Text.Megaparsec as MP
import qualified Text.Megaparsec.Char as MPC
deriveJSON defaultOptions { fieldLabelModifier = removeLensFieldLabel } ''Architecture
deriveJSON defaultOptions { fieldLabelModifier = removeLensFieldLabel } ''LinuxDistro
deriveJSON defaultOptions { fieldLabelModifier = removeLensFieldLabel } ''Mess
deriveJSON defaultOptions { fieldLabelModifier = removeLensFieldLabel } ''MChunk
deriveJSON defaultOptions { fieldLabelModifier = removeLensFieldLabel } ''Platform
deriveJSON defaultOptions { fieldLabelModifier = removeLensFieldLabel } ''SemVer
deriveJSON defaultOptions { fieldLabelModifier = removeLensFieldLabel } ''Tool
@@ -49,10 +59,18 @@ deriveJSON defaultOptions { fieldLabelModifier = removeLensFieldLabel } ''Versio
deriveJSON defaultOptions { fieldLabelModifier = removeLensFieldLabel } ''DownloadInfo
deriveJSON defaultOptions { fieldLabelModifier = removeLensFieldLabel } ''GHCupInfo
deriveJSON defaultOptions { fieldLabelModifier = removeLensFieldLabel } ''Requirements
deriveJSON defaultOptions { fieldLabelModifier = removeLensFieldLabel } ''KeepDirs
deriveJSON defaultOptions { fieldLabelModifier = removeLensFieldLabel } ''Downloader
deriveJSON defaultOptions { sumEncoding = ObjectWithSingleField } ''URLSource
deriveJSON defaultOptions { fieldLabelModifier = \str' -> maybe str' T.unpack . T.stripPrefix (T.pack "u-") . T.pack . kebab $ str' } ''UserSettings
deriveJSON defaultOptions { fieldLabelModifier = \str' -> maybe str' T.unpack . T.stripPrefix (T.pack "k-") . T.pack . kebab $ str' } ''UserKeyBindings
deriveJSON defaultOptions { sumEncoding = ObjectWithSingleField } ''Vty.Key
instance ToJSON Tag where
toJSON Latest = String "Latest"
toJSON Recommended = String "Recommended"
toJSON Prerelease = String "Prerelease"
toJSON Old = String "old"
toJSON (Base pvp'') = String ("base-" <> prettyPVP pvp'')
toJSON (UnknownTag x ) = String (T.pack x)
@@ -60,6 +78,8 @@ instance FromJSON Tag where
parseJSON = withText "Tag" $ \t -> case T.unpack t of
"Latest" -> pure Latest
"Recommended" -> pure Recommended
"Prerelease" -> pure Prerelease
"old" -> pure Old
('b' : 'a' : 's' : 'e' : '-' : ver') -> case pvp (T.pack ver') of
Right x -> pure $ Base x
Left e -> fail . show $ e
@@ -97,10 +117,10 @@ instance ToJSONKey (Maybe Versioning) where
instance FromJSONKey (Maybe Versioning) where
fromJSONKey = FromJSONKeyTextParser $ \t ->
if t == T.pack "unknown_versioning" then pure Nothing else pure $ just t
if t == T.pack "unknown_versioning" then pure Nothing else just t
where
just t = case versioning t of
Right x -> pure x
Right x -> pure $ Just x
Left e -> fail $ "Failure in (Maybe Versioning) (FromJSONKey)" <> show e
instance ToJSONKey Platform where
@@ -128,7 +148,7 @@ instance FromJSONKey Platform where
$ "Unexpected failure in decoding LinuxDistro: "
<> show dstr
Nothing -> fail "Unexpected failure in Platform stripPrefix"
| otherwise -> fail $ "Failure in Platform (FromJSONKey)"
| otherwise -> fail "Failure in Platform (FromJSONKey)"
instance ToJSONKey Architecture where
toJSONKey = genericToJSONKey defaultJSONKeyOptions
@@ -143,10 +163,10 @@ instance ToJSONKey (Maybe Version) where
instance FromJSONKey (Maybe Version) where
fromJSONKey = FromJSONKeyTextParser $ \t ->
if t == T.pack "unknown_version" then pure Nothing else pure $ just t
if t == T.pack "unknown_version" then pure Nothing else just t
where
just t = case version t of
Right x -> pure x
Right x -> pure $ Just x
Left e -> fail $ "Failure in (Maybe Version) (FromJSONKey)" <> show e
instance ToJSON Version where
@@ -191,3 +211,114 @@ instance FromJSON (Path Rel) where
case parseRel d of
Right x -> pure x
Left e -> fail $ "Failure in HPath Rel (FromJSON)" <> show e
instance ToJSON TarDir where
toJSON (RealDir p) = toJSON p
toJSON (RegexDir r) = object ["RegexDir" .= r]
instance FromJSON TarDir where
parseJSON v = realDir v <|> regexDir v
where
realDir = withText "TarDir" $ \t -> do
fp <- parseJSON (String t)
pure (RealDir fp)
regexDir = withObject "TarDir" $ \o -> do
r <- o .: "RegexDir"
pure $ RegexDir r
instance ToJSON VersionCmp where
toJSON = String . versionCmpToText
instance FromJSON VersionCmp where
parseJSON = withText "VersionCmp" $ \t -> do
case MP.parse versionCmpP "" t of
Right r -> pure r
Left e -> fail (MP.errorBundlePretty e)
versionCmpToText :: VersionCmp -> T.Text
versionCmpToText (VR_gt ver') = "> " <> prettyV ver'
versionCmpToText (VR_gteq ver') = ">= " <> prettyV ver'
versionCmpToText (VR_lt ver') = "< " <> prettyV ver'
versionCmpToText (VR_lteq ver') = "<= " <> prettyV ver'
versionCmpToText (VR_eq ver') = "== " <> prettyV ver'
versionCmpP :: MP.Parsec Void T.Text VersionCmp
versionCmpP =
fmap VR_gt (MP.try $ MPC.space *> MP.chunk ">" *> MPC.space *> versioningEnd)
<|> fmap
VR_gteq
(MP.try $ MPC.space *> MP.chunk ">=" *> MPC.space *> versioningEnd)
<|> fmap
VR_lt
(MP.try $ MPC.space *> MP.chunk "<" *> MPC.space *> versioningEnd)
<|> fmap
VR_lteq
(MP.try $ MPC.space *> MP.chunk "<=" *> MPC.space *> versioningEnd)
<|> fmap
VR_eq
(MP.try $ MPC.space *> MP.chunk "==" *> MPC.space *> versioningEnd)
<|> fmap
VR_eq
(MP.try $ MPC.space *> versioningEnd)
instance ToJSON VersionRange where
toJSON = String . verRangeToText
verRangeToText :: VersionRange -> T.Text
verRangeToText (SimpleRange cmps) =
let inner = foldr1 (\x y -> x <> " && " <> y)
(versionCmpToText <$> NE.toList cmps)
in "( " <> inner <> " )"
verRangeToText (OrRange cmps range) =
let left = verRangeToText (SimpleRange cmps)
right = verRangeToText range
in left <> " || " <> right
instance FromJSON VersionRange where
parseJSON = withText "VersionRange" $ \t -> do
case MP.parse versionRangeP "" t of
Right r -> pure r
Left e -> fail (MP.errorBundlePretty e)
versionRangeP :: MP.Parsec Void T.Text VersionRange
versionRangeP = go <* MP.eof
where
go =
MP.try orParse
<|> MP.try (fmap SimpleRange andParse)
<|> fmap (SimpleRange . pure) versionCmpP
orParse :: MP.Parsec Void T.Text VersionRange
orParse =
(\a o -> OrRange a o)
<$> (MP.try andParse <|> fmap pure versionCmpP)
<*> (MPC.space *> MP.chunk "||" *> MPC.space *> go)
andParse :: MP.Parsec Void T.Text (NonEmpty VersionCmp)
andParse =
fmap (\h t -> h :| t)
(MPC.space *> MP.chunk "(" *> MPC.space *> versionCmpP)
<*> MP.try (MP.many (MPC.space *> MP.chunk "&&" *> MPC.space *> versionCmpP))
<* MPC.space
<* MP.chunk ")"
<* MPC.space
versioningEnd :: MP.Parsec Void T.Text Versioning
versioningEnd =
MP.try (verP (MP.chunk " " <|> MP.chunk ")" <|> MP.chunk "&&") <* MPC.space)
<|> versioning'
instance ToJSONKey (Maybe VersionRange) where
toJSONKey = toJSONKeyText $ \case
Just x -> verRangeToText x
Nothing -> "unknown_versioning"
instance FromJSONKey (Maybe VersionRange) where
fromJSONKey = FromJSONKeyTextParser $ \t ->
if t == T.pack "unknown_versioning" then pure Nothing else just t
where
just t = case MP.parse versionRangeP "" t of
Right x -> pure $ Just x
Left e -> fail $ "Failure in (Maybe VersionRange) (FromJSONKey)" <> MP.errorBundlePretty e

Some files were not shown because too many files have changed in this diff.