pax_global_header00006660000000000000000000000064146551501250014516gustar00rootroot0000000000000052 comment=eaeff3236ebe82039553f39bc285ee5c79caae3c extract-4.0.0/000077500000000000000000000000001465515012500131715ustar00rootroot00000000000000extract-4.0.0/.github/000077500000000000000000000000001465515012500145315ustar00rootroot00000000000000extract-4.0.0/.github/workflows/000077500000000000000000000000001465515012500165665ustar00rootroot00000000000000extract-4.0.0/.github/workflows/test.yaml000066400000000000000000000007151465515012500204340ustar00rootroot00000000000000name: test on: push: pull_request: jobs: build-and-test: strategy: matrix: os: [ubuntu-latest, macOS-latest] runs-on: ${{ matrix.os }} steps: - uses: actions/checkout@v1 - uses: actions/setup-go@v1 with: go-version: "1.22" - name: Build native run: go build -v ./... shell: bash - name: Run unit tests run: go test -v -race -bench=. ./... shell: bash extract-4.0.0/LICENSE000066400000000000000000000020641465515012500142000ustar00rootroot00000000000000The MIT License (MIT) Copyright (c) 2016 codeclysm Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. extract-4.0.0/README.md000066400000000000000000000047231465515012500144560ustar00rootroot00000000000000# Extract [![Build Status](https://github.com/codeclysm/extract/actions/workflows/test.yaml/badge.svg?branch=master)](https://github.com/codeclysm/extract/actions/workflows/test.yaml) [![GitHub license](https://img.shields.io/badge/license-MIT-blue.svg)](https://raw.githubusercontent.com/codeclysm/extract/master/LICENSE) [![Godoc Reference](https://img.shields.io/badge/Godoc-Reference-blue.svg)](https://godoc.org/github.com/codeclysm/extract) import "github.com/codeclysm/extract/v4" Package extract allows to extract archives in zip, tar.gz or tar.bz2 formats easily. Most of the time you'll just need to call the proper function with a Reader and a destination: ```go file, _ := os.Open("path/to/file.tar.bz2") extract.Bz2(context.TODO, file, "/path/where/to/extract", nil) ``` or also: ```go data, _ := ioutil.ReadFile("path/to/file.tar.bz2") buffer := bytes.NewBuffer(data) extract.Bz2(context.TODO, buffer, "/path/where/to/extract", nil) ``` Sometimes you'll want a bit more control over the files, such as extracting a subfolder of the archive. 
In this cases you can specify a renamer func that will change the path for every file: ```go var shift = func(path string) string { parts := strings.Split(path, string(filepath.Separator)) parts = parts[1:] return strings.Join(parts, string(filepath.Separator)) } extract.Bz2(context.TODO, file, "/path/where/to/extract", shift) ``` If you don't know which archive you're dealing with (life really is always a surprise) you can use Archive, which will infer the type of archive from the first bytes ```go extract.Archive(context.TODO, file, "/path/where/to/extract", nil) ``` If you need more control over how your files will be extracted you can use an Extractor. It Needs a FS object that implements the FS interface: ```go type FS interface { Link(string, string) error MkdirAll(string, os.FileMode) error OpenFile(name string, flag int, perm os.FileMode) (*os.File, error) Symlink(string, string) error Remove(path string) error Stat(name string) (os.FileInfo, error) Chmod(name string, mode os.FileMode) error } ``` which contains only the required function to perform an extraction. This way it's easy to wrap os functions to chroot the path, or scramble the files, or send an event for each operation or even reimplementing them for an in-memory store, I don't know. ```go extractor := extract.Extractor{ FS: fs, } extractor.Archive(context.TODO, file, "/path/where/to/extract", nil) ``` extract-4.0.0/cancelable_reader.go000066400000000000000000000010701465515012500171110ustar00rootroot00000000000000package extract import ( "context" "errors" "io" ) func copyCancel(ctx context.Context, dst io.Writer, src io.Reader) (int64, error) { return io.Copy(dst, newCancelableReader(ctx, src)) } type cancelableReader struct { ctx context.Context src io.Reader } func (r *cancelableReader) Read(p []byte) (int, error) { select { case <-r.ctx.Done(): return 0, errors.New("interrupted") default: return r.src.Read(p) } } func newCancelableReader(ctx context.Context, src io.Reader) *cancelableReader { return &cancelableReader{ ctx: ctx, src: src, } } extract-4.0.0/cancelable_reader_test.go000066400000000000000000000024021465515012500201500ustar00rootroot00000000000000package extract import ( "bytes" "context" "fmt" "testing" "time" "github.com/stretchr/testify/require" ) func TestCancelableReader(t *testing.T) { var b [100000]byte ctx, cancel := context.WithCancel(context.Background()) reader := newCancelableReader(ctx, bytes.NewReader(b[:])) defer cancel() var buff [1000]byte readed := 0 for { n, err := reader.Read(buff[:]) if err != nil { fmt.Println("exit error:", err) require.Equal(t, "EOF", err.Error()) break } require.NotZero(t, n) time.Sleep(10 * time.Millisecond) readed += n } fmt.Println("Readed", readed, "out of", len(b)) require.Equal(t, len(b), readed) } func TestCancelableReaderWithInterruption(t *testing.T) { var b [100000]byte ctx, cancel := context.WithCancel(context.Background()) reader := newCancelableReader(ctx, bytes.NewReader(b[:])) defer cancel() go func() { time.Sleep(100 * time.Millisecond) cancel() }() var buff [1000]byte readed := 0 for { n, err := reader.Read(buff[:]) if err != nil { fmt.Println("exit error:", err) require.Equal(t, "interrupted", err.Error()) break } require.NotZero(t, n) time.Sleep(10 * time.Millisecond) readed += n } fmt.Println("Readed", readed, "out of", len(b)) require.True(t, readed < len(b)) } 
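The cancelable reader above is what lets every extraction function honour its context argument: each Read checks ctx.Done() before touching the wrapped stream, so a cancelled context surfaces as an "interrupted" error instead of a hung copy. A minimal sketch of driving that from the caller's side; the archive path and the 30-second timeout are placeholders, not part of the library:

```go
package main

import (
	"context"
	"log"
	"os"
	"time"

	"github.com/codeclysm/extract/v4"
)

func main() {
	f, err := os.Open("path/to/huge-archive.tar.gz") // placeholder path
	if err != nil {
		log.Fatal(err)
	}
	defer f.Close()

	// Give the extraction at most 30 seconds; once the deadline fires,
	// the cancelable reader inside extract stops the copy and the call
	// returns an error mentioning "interrupted".
	ctx, cancel := context.WithTimeout(context.Background(), 30*time.Second)
	defer cancel()

	if err := extract.Gz(ctx, f, "/path/where/to/extract", nil); err != nil {
		log.Fatal(err)
	}
}
```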
extract-4.0.0/evil_generator/000077500000000000000000000000001465515012500161765ustar00rootroot00000000000000extract-4.0.0/evil_generator/main.go000066400000000000000000000066611465515012500174620ustar00rootroot00000000000000// This utility is used to generate the archives used as testdata for zipslip vulnerability package main //go:generate go run main.go ../testdata/zipslip import ( "archive/tar" "archive/zip" "bytes" "log" "os" "github.com/arduino/go-paths-helper" ) func main() { if len(os.Args) != 2 { log.Fatal("Missing output directory") } outputDir := paths.New(os.Args[1]) if outputDir.IsNotDir() { log.Fatalf("Output path %s is not a directory", outputDir) } generateEvilZipSlip(outputDir) generateEvilSymLinkPathTraversalTar(outputDir) } func generateEvilZipSlip(outputDir *paths.Path) { evilPathTraversalFiles := []string{ "..", "../../../../../../../../../../../../../../../../../../../../tmp/evil.txt", "some/path/../../../../../../../../../../../../../../../../../../../../tmp/evil.txt", "/../../../../../../../../../../../../../../../../../../../../tmp/evil.txt", "/some/path/../../../../../../../../../../../../../../../../../../../../tmp/evil.txt", } winSpecificPathTraversalFiles := []string{ "..\\..\\..\\..\\..\\..\\..\\..\\..\\..\\..\\..\\..\\..\\..\\..\\..\\..\\..\\..\\tmp\\evil.txt", "some\\path\\..\\..\\..\\..\\..\\..\\..\\..\\..\\..\\..\\..\\..\\..\\..\\..\\..\\..\\..\\..\\tmp\\evil.txt", "\\..\\..\\..\\..\\..\\..\\..\\..\\..\\..\\..\\..\\..\\..\\..\\..\\..\\..\\..\\..\\tmp\\evil.txt", "\\some\\path\\..\\..\\..\\..\\..\\..\\..\\..\\..\\..\\..\\..\\..\\..\\..\\..\\..\\..\\..\\..\\tmp\\evil.txt", } winSpecificPathTraversalFiles = append(winSpecificPathTraversalFiles, evilPathTraversalFiles...) // Generate evil zip { buf := new(bytes.Buffer) w := zip.NewWriter(buf) for _, file := range winSpecificPathTraversalFiles { if f, err := w.Create(file); err != nil { log.Fatal(err) } else if _, err = f.Write([]byte("TEST")); err != nil { log.Fatal(err) } } if err := w.Close(); err != nil { log.Fatal(err) } if err := outputDir.Join("evil.zip").WriteFile(buf.Bytes()); err != nil { log.Fatal(err) } } // Generate evil tar { buf := new(bytes.Buffer) w := tar.NewWriter(buf) for _, file := range evilPathTraversalFiles { if err := w.WriteHeader(&tar.Header{ Name: file, Size: 4, Mode: 0666, }); err != nil { log.Fatal(err) } if _, err := w.Write([]byte("TEST")); err != nil { log.Fatal(err) } } if err := w.Close(); err != nil { log.Fatal(err) } if err := outputDir.Join("evil.tar").WriteFile(buf.Bytes()); err != nil { log.Fatal(err) } } // Generate evil tar for windows { buf := new(bytes.Buffer) w := tar.NewWriter(buf) for _, file := range winSpecificPathTraversalFiles { if err := w.WriteHeader(&tar.Header{ Name: file, Size: 4, Mode: 0666, }); err != nil { log.Fatal(err) } if _, err := w.Write([]byte("TEST")); err != nil { log.Fatal(err) } } if err := w.Close(); err != nil { log.Fatal(err) } if err := outputDir.Join("evil-win.tar").WriteFile(buf.Bytes()); err != nil { log.Fatal(err) } } } func generateEvilSymLinkPathTraversalTar(outputDir *paths.Path) { outputTarFile, err := outputDir.Join("evil-link-traversal.tar").Create() if err != nil { log.Fatal(err) } defer outputTarFile.Close() tw := tar.NewWriter(outputTarFile) defer tw.Close() if err := tw.WriteHeader(&tar.Header{ Name: "leak", Linkname: "../../../../../../../../../../../../../../../tmp/something-important", Mode: 0o0777, Size: 0, Typeflag: tar.TypeLink, }); err != nil { log.Fatal(err) } } 
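The archives generated above exist to exercise the extractor's path-traversal defence: each entry name is joined onto the destination and dropped if the result escapes it (this is the safeJoin check that appears later in extractor.go). A condensed, standalone sketch of that check, fed with one honest name and one of the generated evil names:

```go
package main

import (
	"fmt"
	"os"
	"path/filepath"
	"strings"
)

// safeJoin mirrors the check used by the extractor: join the entry name onto
// the destination and refuse the result if it falls outside of it.
func safeJoin(parent, subdir string) (string, error) {
	res := filepath.Join(parent, subdir)
	if !strings.HasSuffix(parent, string(os.PathSeparator)) {
		parent += string(os.PathSeparator)
	}
	if !strings.HasPrefix(res, parent) {
		return res, fmt.Errorf("unsafe path join: %q with %q", parent, subdir)
	}
	return res, nil
}

func main() {
	// An honest entry stays inside the destination...
	if p, err := safeJoin("/tmp/test", "archive/file1.txt"); err == nil {
		fmt.Println("ok:", p)
	}
	// ...while a generated path-traversal entry is rejected.
	if _, err := safeJoin("/tmp/test", "../../../../tmp/evil.txt"); err != nil {
		fmt.Println("rejected:", err)
	}
}
```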
extract-4.0.0/extract.go000066400000000000000000000105561465515012500152010ustar00rootroot00000000000000// Package extract allows to extract archives in zip, tar.gz or tar.bz2 formats // easily. // // Most of the time you'll just need to call the proper function with a Reader and // a destination: // // file, _ := os.Open("path/to/file.tar.bz2") // extract.Bz2(context.TODO, file, "/path/where/to/extract", nil) // // ``` // // Sometimes you'll want a bit more control over the files, such as extracting a // subfolder of the archive. In this cases you can specify a renamer func that will // change the path for every file: // // var shift = func(path string) string { // parts := strings.Split(path, string(filepath.Separator)) // parts = parts[1:] // return strings.Join(parts, string(filepath.Separator)) // } // extract.Bz2(context.TODO, file, "/path/where/to/extract", shift) // // ``` // // If you don't know which archive you're dealing with (life really is always a surprise) you can use Archive, which will infer the type of archive from the first bytes // // extract.Archive(context.TODO, file, "/path/where/to/extract", nil) package extract import ( "context" "io" "os" ) // Renamer is a function that can be used to rename the files when you're extracting // them. For example you may want to only extract files with a certain pattern. // If you return an empty string they won't be extracted. type Renamer func(string) string // Archive extracts a generic archived stream of data in the specified location. // It automatically detects the archive type and accepts a rename function to // handle the names of the files. // If the file is not an archive, an error is returned. func Archive(ctx context.Context, body io.Reader, location string, rename Renamer) error { extractor := Extractor{FS: fs{}} return extractor.Archive(ctx, body, location, rename) } // Zstd extracts a .zst or .tar.zst archived stream of data in the specified location. // It accepts a rename function to handle the names of the files (see the example) func Zstd(ctx context.Context, body io.Reader, location string, rename Renamer) error { extractor := Extractor{FS: fs{}} return extractor.Zstd(ctx, body, location, rename) } // Xz extracts a .xz or .tar.xz archived stream of data in the specified location. // It accepts a rename function to handle the names of the files (see the example) func Xz(ctx context.Context, body io.Reader, location string, rename Renamer) error { extractor := Extractor{FS: fs{}} return extractor.Xz(ctx, body, location, rename) } // Bz2 extracts a .bz2 or .tar.bz2 archived stream of data in the specified location. // It accepts a rename function to handle the names of the files (see the example) func Bz2(ctx context.Context, body io.Reader, location string, rename Renamer) error { extractor := Extractor{FS: fs{}} return extractor.Bz2(ctx, body, location, rename) } // Gz extracts a .gz or .tar.gz archived stream of data in the specified location. // It accepts a rename function to handle the names of the files (see the example) func Gz(ctx context.Context, body io.Reader, location string, rename Renamer) error { extractor := Extractor{FS: fs{}} return extractor.Gz(ctx, body, location, rename) } // Tar extracts a .tar archived stream of data in the specified location. 
// It accepts a rename function to handle the names of the files (see the example) func Tar(ctx context.Context, body io.Reader, location string, rename Renamer) error { extractor := Extractor{FS: fs{}} return extractor.Tar(ctx, body, location, rename) } // Zip extracts a .zip archived stream of data in the specified location. // It accepts a rename function to handle the names of the files (see the example). func Zip(ctx context.Context, body io.Reader, location string, rename Renamer) error { extractor := Extractor{FS: fs{}} return extractor.Zip(ctx, body, location, rename) } type fs struct{} func (f fs) Link(oldname, newname string) error { return os.Link(oldname, newname) } func (f fs) MkdirAll(path string, perm os.FileMode) error { return os.MkdirAll(path, perm) } func (f fs) Symlink(oldname, newname string) error { return os.Symlink(oldname, newname) } func (f fs) OpenFile(name string, flag int, perm os.FileMode) (*os.File, error) { return os.OpenFile(name, flag, perm) } func (f fs) Remove(path string) error { return os.Remove(path) } func (f fs) Stat(name string) (os.FileInfo, error) { return os.Stat(name) } func (f fs) Chmod(name string, mode os.FileMode) error { return os.Chmod(name, mode) } extract-4.0.0/extract_test.go000066400000000000000000000304351465515012500162360ustar00rootroot00000000000000package extract_test import ( "bytes" "context" "fmt" "io" "net/http" "os" "path/filepath" "runtime" "strconv" "strings" "testing" "github.com/arduino/go-paths-helper" "github.com/codeclysm/extract/v4" "github.com/stretchr/testify/require" ) type Files map[string]string var shift = func(path string) string { parts := strings.Split(path, string(filepath.Separator)) parts = parts[1:] return strings.Join(parts, string(filepath.Separator)) } var subfolder = func(path string) string { if strings.Contains(path, "archive/folder") { return path } return "" } var ExtractCases = []struct { Name string Archive string Renamer extract.Renamer Files Files }{ {"standard bz2", "testdata/archive.tar.bz2", nil, Files{ "": "dir", "/archive": "dir", "/archive/folder": "dir", "/archive/folderlink": "link", "/archive/folder/file1.txt": "folder/File1", "/archive/file1.txt": "File1", "/archive/file2.txt": "File2", "/archive/link.txt": "File1", }}, {"shift bz2", "testdata/archive.tar.bz2", shift, Files{ "": "dir", "/folder": "dir", "/folderlink": "link", "/folder/file1.txt": "folder/File1", "/file1.txt": "File1", "/file2.txt": "File2", "/link.txt": "File1", }}, {"subfolder bz2", "testdata/archive.tar.bz2", subfolder, Files{ "": "dir", "/archive": "dir", "/archive/folder": "dir", "/archive/folder/file1.txt": "folder/File1", "/archive/folderlink": "link", }}, {"not tarred bz2", "testdata/singlefile.bz2", nil, Files{ "": "singlefile", }}, {"standard gz", "testdata/archive.tar.gz", nil, Files{ "": "dir", "/archive": "dir", "/archive/folder": "dir", "/archive/folderlink": "link", "/archive/folder/file1.txt": "folder/File1", "/archive/file1.txt": "File1", "/archive/file2.txt": "File2", "/archive/link.txt": "File1", }}, {"shift gz", "testdata/archive.tar.gz", shift, Files{ "": "dir", "/folder": "dir", "/folderlink": "link", "/folder/file1.txt": "folder/File1", "/file1.txt": "File1", "/file2.txt": "File2", "/link.txt": "File1", }}, {"subfolder gz", "testdata/archive.tar.gz", subfolder, Files{ "": "dir", "/archive": "dir", "/archive/folder": "dir", "/archive/folder/file1.txt": "folder/File1", "/archive/folderlink": "link", }}, {"not tarred gz", "testdata/singlefile.gz", nil, Files{ "": "singlefile", }}, // Note that the 
zip format doesn't support hard links {"standard zip", "testdata/archive.zip", nil, Files{ "": "dir", "/archive": "dir", "/archive/folder": "dir", "/archive/folderlink": "link", "/archive/folder/file1.txt": "folder/File1", "/archive/file1.txt": "File1", "/archive/file2.txt": "File2", "/archive/link.txt": "File1", }}, {"shift zip", "testdata/archive.zip", shift, Files{ "": "dir", "/folder": "dir", "/folderlink": "link", "/folder/file1.txt": "folder/File1", "/file1.txt": "File1", "/file2.txt": "File2", "/link.txt": "File1", }}, {"subfolder zip", "testdata/archive.zip", subfolder, Files{ "": "dir", "/archive": "dir", "/archive/folder": "dir", "/archive/folder/file1.txt": "folder/File1", "/archive/folderlink": "link", }}, {"standard inferred", "testdata/archive.mistery", nil, Files{ "": "dir", "/archive": "dir", "/archive/folder": "dir", "/archive/folderlink": "link", "/archive/folder/file1.txt": "folder/File1", "/archive/file1.txt": "File1", "/archive/file2.txt": "File2", "/archive/link.txt": "File1", }}, {"shift inferred", "testdata/archive.mistery", shift, Files{ "": "dir", "/folder": "dir", "/folderlink": "link", "/folder/file1.txt": "folder/File1", "/file1.txt": "File1", "/file2.txt": "File2", "/link.txt": "File1", }}, {"subfolder inferred", "testdata/archive.mistery", subfolder, Files{ "": "dir", "/archive": "dir", "/archive/folder": "dir", "/archive/folder/file1.txt": "folder/File1", "/archive/folderlink": "link", }}, {"standard zip with backslashes", "testdata/archive-with-backslashes.zip", nil, Files{ "": "dir", "/AZ3166": "dir", "/AZ3166/libraries": "dir", "/AZ3166/libraries/AzureIoT": "dir", "/AZ3166/libraries/AzureIoT/keywords.txt": "Azure", "/AZ3166/cores": "dir", "/AZ3166/cores/arduino": "dir", "/AZ3166/cores/arduino/azure-iot-sdk-c": "dir", "/AZ3166/cores/arduino/azure-iot-sdk-c/umqtt": "dir", "/AZ3166/cores/arduino/azure-iot-sdk-c/umqtt/src": "dir", }}, {"shift zip with backslashes", "testdata/archive-with-backslashes.zip", shift, Files{ "": "dir", "/libraries": "dir", "/libraries/AzureIoT": "dir", "/libraries/AzureIoT/keywords.txt": "Azure", "/cores": "dir", "/cores/arduino": "dir", "/cores/arduino/azure-iot-sdk-c": "dir", "/cores/arduino/azure-iot-sdk-c/umqtt": "dir", "/cores/arduino/azure-iot-sdk-c/umqtt/src": "dir", }}, } func TestArchiveFailure(t *testing.T) { err := extract.Archive(context.Background(), strings.NewReader("not an archive"), "", nil) require.Error(t, err) require.Contains(t, err.Error(), "Not a supported archive") } func TestExtract(t *testing.T) { for _, test := range ExtractCases { dir, _ := os.MkdirTemp("", "") dir = filepath.Join(dir, "test") data, err := os.ReadFile(test.Archive) if err != nil { t.Fatal(err) } buffer := bytes.NewBuffer(data) switch filepath.Ext(test.Archive) { case ".bz2": err = extract.Bz2(context.Background(), buffer, dir, test.Renamer) case ".gz": err = extract.Gz(context.Background(), buffer, dir, test.Renamer) case ".zip": err = extract.Zip(context.Background(), buffer, dir, test.Renamer) case ".mistery": err = extract.Archive(context.Background(), buffer, dir, test.Renamer) default: t.Fatal("unknown error") } if err != nil { t.Fatal(test.Name, ": Should not fail: "+err.Error()) } testWalk(t, dir, test.Files) err = os.RemoveAll(dir) if err != nil { t.Fatal(err) } } } func TestExtractIdempotency(t *testing.T) { for _, test := range ExtractCases { t.Run(test.Name, func(t *testing.T) { dir, _ := os.MkdirTemp("", "") dir = filepath.Join(dir, "test") data, err := os.ReadFile(test.Archive) if err != nil { t.Fatal(err) } var extractFn 
func(context.Context, io.Reader, string, extract.Renamer) error switch filepath.Ext(test.Archive) { case ".bz2": extractFn = extract.Bz2 case ".gz": extractFn = extract.Gz case ".zip": extractFn = extract.Zip case ".mistery": extractFn = extract.Archive default: t.Fatal("unknown error") } buffer := bytes.NewBuffer(data) if err = extractFn(context.Background(), buffer, dir, test.Renamer); err != nil { t.Fatal(test.Name, ": Should not fail first extraction: "+err.Error()) } buffer = bytes.NewBuffer(data) if err = extractFn(context.Background(), buffer, dir, test.Renamer); err != nil { t.Fatal(test.Name, ": Should not fail second extraction: "+err.Error()) } testWalk(t, dir, test.Files) err = os.RemoveAll(dir) if err != nil { t.Fatal(err) } }) } } func BenchmarkArchive(b *testing.B) { dir, _ := os.MkdirTemp("", "") data, _ := os.ReadFile("testdata/archive.tar.bz2") b.StartTimer() for i := 0; i < b.N; i++ { buffer := bytes.NewBuffer(data) err := extract.Archive(context.Background(), buffer, filepath.Join(dir, strconv.Itoa(i)), nil) if err != nil { b.Error(err) } } b.StopTimer() err := os.RemoveAll(dir) if err != nil { b.Error(err) } } func BenchmarkTarBz2(b *testing.B) { dir, _ := os.MkdirTemp("", "") data, _ := os.ReadFile("testdata/archive.tar.bz2") b.StartTimer() for i := 0; i < b.N; i++ { buffer := bytes.NewBuffer(data) err := extract.Bz2(context.Background(), buffer, filepath.Join(dir, strconv.Itoa(i)), nil) if err != nil { b.Error(err) } } b.StopTimer() err := os.RemoveAll(dir) if err != nil { b.Error(err) } } func BenchmarkTarGz(b *testing.B) { dir, _ := os.MkdirTemp("", "") data, _ := os.ReadFile("testdata/archive.tar.gz") b.StartTimer() for i := 0; i < b.N; i++ { buffer := bytes.NewBuffer(data) err := extract.Gz(context.Background(), buffer, filepath.Join(dir, strconv.Itoa(i)), nil) if err != nil { b.Error(err) } } b.StopTimer() err := os.RemoveAll(dir) if err != nil { b.Error(err) } } func BenchmarkZip(b *testing.B) { dir, _ := os.MkdirTemp("", "") data, _ := os.ReadFile("testdata/archive.zip") b.StartTimer() for i := 0; i < b.N; i++ { buffer := bytes.NewBuffer(data) err := extract.Zip(context.Background(), buffer, filepath.Join(dir, strconv.Itoa(i)), nil) if err != nil { b.Error(err) } } b.StopTimer() err := os.RemoveAll(dir) if err != nil { b.Error(err) } } func testWalk(t *testing.T, dir string, testFiles Files) { files := Files{} filepath.Walk(dir, func(path string, info os.FileInfo, err error) error { path = strings.Replace(path, dir, "", 1) if info.IsDir() { files[path] = "dir" } else if info.Mode()&os.ModeSymlink != 0 { files[path] = "link" } else { data, err := os.ReadFile(filepath.Join(dir, path)) require.NoError(t, err) files[path] = strings.TrimSpace(string(data)) } return nil }) for file, kind := range files { k, ok := testFiles[file] if !ok { t.Error(file + " should not exist") continue } if kind != k { t.Error(file + " should be " + k + ", not " + kind) continue } } for file, kind := range testFiles { k, ok := files[file] if !ok { t.Error(file + " should exist") continue } if kind != k { t.Error(file + " should be " + kind + ", not " + k) continue } } } func TestTarGzMemoryConsumption(t *testing.T) { archive := paths.New("testdata/big.tar.gz") download(t, "http://downloads.arduino.cc/gcc-arm-none-eabi-4.8.3-2014q1-windows.tar.gz", archive) tmpDir, err := paths.MkTempDir("", "") require.NoError(t, err) defer tmpDir.RemoveAll() f, err := archive.Open() require.NoError(t, err) var m, m2 runtime.MemStats runtime.GC() runtime.ReadMemStats(&m) err = 
extract.Archive(context.Background(), f, tmpDir.String(), nil) require.NoError(t, err) runtime.ReadMemStats(&m2) heapUsed := m2.HeapInuse - m.HeapInuse if m2.HeapInuse < m.HeapInuse { heapUsed = 0 } fmt.Println("Heap memory used during the test:", heapUsed) require.True(t, heapUsed < 5000000, "heap consumption should be less than 5M but is %d", heapUsed) } func TestZipMemoryConsumption(t *testing.T) { archive := paths.New("testdata/big.zip") download(t, "http://downloads.arduino.cc/tools/gcc-arm-none-eabi-7-2017-q4-major-win32-arduino1.zip", archive) tmpDir, err := paths.MkTempDir("", "") require.NoError(t, err) defer tmpDir.RemoveAll() f, err := archive.Open() require.NoError(t, err) var m, m2 runtime.MemStats runtime.GC() runtime.ReadMemStats(&m) err = extract.Archive(context.Background(), f, tmpDir.String(), nil) require.NoError(t, err) runtime.ReadMemStats(&m2) heapUsed := m2.HeapInuse - m.HeapInuse if m2.HeapInuse < m.HeapInuse { heapUsed = 0 } fmt.Println("Heap memory used during the test:", heapUsed) require.True(t, heapUsed < 10000000, "heap consumption should be less than 10M but is %d", heapUsed) } func download(t require.TestingT, url string, file *paths.Path) { if file.Exist() { return } fmt.Printf("Downloading %s in %s\n", url, file) resp, err := http.Get(url) require.NoError(t, err) defer resp.Body.Close() out, err := file.Create() require.NoError(t, err) _, err = io.Copy(out, resp.Body) out.Close() if err != nil { file.Remove() } require.NoError(t, err) } extract-4.0.0/extractor.go000066400000000000000000000300531465515012500155340ustar00rootroot00000000000000package extract import ( "archive/tar" "archive/zip" "bytes" "compress/bzip2" "compress/gzip" "context" "fmt" "io" "os" "path/filepath" "strings" filetype "github.com/h2non/filetype" "github.com/h2non/filetype/types" "github.com/juju/errors" "github.com/klauspost/compress/zstd" "github.com/ulikunitz/xz" ) // Extractor is more sophisticated than the base functions. It allows to write over an interface // rather than directly on the filesystem type Extractor struct { FS interface { // Link creates newname as a hard link to the oldname file. If there is an error, it will be of type *LinkError. Link(oldname, newname string) error // MkdirAll creates the directory path and all his parents if needed. MkdirAll(path string, perm os.FileMode) error // OpenFile opens the named file with specified flag (O_RDONLY etc.). OpenFile(name string, flag int, perm os.FileMode) (*os.File, error) // Symlink creates newname as a symbolic link to oldname. Symlink(oldname, newname string) error // Remove removes the named file or (empty) directory. Remove(path string) error // Stat returns a FileInfo describing the named file. Stat(name string) (os.FileInfo, error) // Chmod changes the mode of the named file to mode. // If the file is a symbolic link, it changes the mode of the link's target. Chmod(name string, mode os.FileMode) error } } // Archive extracts a generic archived stream of data in the specified location. // It automatically detects the archive type and accepts a rename function to // handle the names of the files. // If the file is not an archive, an error is returned. 
func (e *Extractor) Archive(ctx context.Context, body io.Reader, location string, rename Renamer) error { body, kind, err := match(body) if err != nil { errors.Annotatef(err, "Detect archive type") } switch kind.Extension { case "zip": return e.Zip(ctx, body, location, rename) case "gz": return e.Gz(ctx, body, location, rename) case "bz2": return e.Bz2(ctx, body, location, rename) case "xz": return e.Xz(ctx, body, location, rename) case "zst": return e.Zstd(ctx, body, location, rename) case "tar": return e.Tar(ctx, body, location, rename) default: return errors.New("Not a supported archive: " + kind.Extension) } } func (e *Extractor) Zstd(ctx context.Context, body io.Reader, location string, rename Renamer) error { reader, err := zstd.NewReader(body) if err != nil { return errors.Annotatef(err, "opening zstd: detect") } body, kind, err := match(reader) if err != nil { return errors.Annotatef(err, "extract zstd: detect") } if kind.Extension == "tar" { return e.Tar(ctx, body, location, rename) } err = e.copy(ctx, location, 0666, body) if err != nil { return err } return nil } func (e *Extractor) Xz(ctx context.Context, body io.Reader, location string, rename Renamer) error { reader, err := xz.NewReader(body) if err != nil { return errors.Annotatef(err, "opening xz: detect") } body, kind, err := match(reader) if err != nil { return errors.Annotatef(err, "extract xz: detect") } if kind.Extension == "tar" { return e.Tar(ctx, body, location, rename) } err = e.copy(ctx, location, 0666, body) if err != nil { return err } return nil } // Bz2 extracts a .bz2 or .tar.bz2 archived stream of data in the specified location. // It accepts a rename function to handle the names of the files (see the example) func (e *Extractor) Bz2(ctx context.Context, body io.Reader, location string, rename Renamer) error { reader := bzip2.NewReader(body) body, kind, err := match(reader) if err != nil { return errors.Annotatef(err, "extract bz2: detect") } if kind.Extension == "tar" { return e.Tar(ctx, body, location, rename) } err = e.copy(ctx, location, 0666, body) if err != nil { return err } return nil } // Gz extracts a .gz or .tar.gz archived stream of data in the specified location. // It accepts a rename function to handle the names of the files (see the example) func (e *Extractor) Gz(ctx context.Context, body io.Reader, location string, rename Renamer) error { reader, err := gzip.NewReader(body) if err != nil { return errors.Annotatef(err, "Gunzip") } body, kind, err := match(reader) if err != nil { return err } if kind.Extension == "tar" { return e.Tar(ctx, body, location, rename) } err = e.copy(ctx, location, 0666, body) if err != nil { return err } return nil } type link struct { Name string Path string } // Tar extracts a .tar archived stream of data in the specified location. 
// It accepts a rename function to handle the names of the files (see the example) func (e *Extractor) Tar(ctx context.Context, body io.Reader, location string, rename Renamer) error { links := []*link{} symlinks := []*link{} // We make the first pass creating the directory structure, or we could end up // attempting to create a file where there's no folder tr := tar.NewReader(body) for { select { case <-ctx.Done(): return errors.New("interrupted") default: } header, err := tr.Next() if err == io.EOF { break } if err != nil { return errors.Annotatef(err, "Read tar stream") } path := header.Name if rename != nil { path = rename(path) } if path == "" { continue } if path, err = safeJoin(location, path); err != nil { continue } info := header.FileInfo() switch header.Typeflag { case tar.TypeDir: if err := e.FS.MkdirAll(path, info.Mode()); err != nil { return errors.Annotatef(err, "Create directory %s", path) } case tar.TypeReg, tar.TypeRegA: if err := e.copy(ctx, path, info.Mode(), tr); err != nil { return errors.Annotatef(err, "Create file %s", path) } case tar.TypeLink: name := header.Linkname if rename != nil { name = rename(name) } name, err = safeJoin(location, name) if err != nil { continue } links = append(links, &link{Path: path, Name: name}) case tar.TypeSymlink: symlinks = append(symlinks, &link{Path: path, Name: header.Linkname}) } } // Now we make another pass creating the links for i := range links { select { case <-ctx.Done(): return errors.New("interrupted") default: } _ = e.FS.Remove(links[i].Path) if err := e.FS.Link(links[i].Name, links[i].Path); err != nil { return errors.Annotatef(err, "Create link %s", links[i].Path) } } if err := e.extractSymlinks(ctx, symlinks); err != nil { return err } return nil } func (e *Extractor) extractSymlinks(ctx context.Context, symlinks []*link) error { for _, symlink := range symlinks { select { case <-ctx.Done(): return errors.New("interrupted") default: } // Make a placeholder and replace it after unpacking everything _ = e.FS.Remove(symlink.Path) f, err := e.FS.OpenFile(symlink.Path, os.O_CREATE|os.O_TRUNC|os.O_WRONLY, os.FileMode(0666)) if err != nil { return fmt.Errorf("creating symlink placeholder %s: %w", symlink.Path, err) } if err := f.Close(); err != nil { return fmt.Errorf("creating symlink placeholder %s: %w", symlink.Path, err) } } for _, symlink := range symlinks { select { case <-ctx.Done(): return errors.New("interrupted") default: } _ = e.FS.Remove(symlink.Path) if err := e.FS.Symlink(symlink.Name, symlink.Path); err != nil { return errors.Annotatef(err, "Create link %s", symlink.Path) } } return nil } // Zip extracts a .zip archived stream of data in the specified location. // It accepts a rename function to handle the names of the files (see the example). func (e *Extractor) Zip(ctx context.Context, body io.Reader, location string, rename Renamer) error { var bodySize int64 bodyReaderAt, isReaderAt := (body).(io.ReaderAt) if bodySeeker, isSeeker := (body).(io.Seeker); isReaderAt && isSeeker { // get the size by seeking to the end endPos, err := bodySeeker.Seek(0, io.SeekEnd) if err != nil { return fmt.Errorf("failed to seek to the end of the body: %s", err) } // reset the reader to the beginning if _, err := bodySeeker.Seek(0, io.SeekStart); err != nil { return fmt.Errorf("failed to seek to the beginning of the body: %w", err) } bodySize = endPos } else { // read the whole body into a buffer. 
Not sure this is the best way to do it buffer := bytes.NewBuffer([]byte{}) copyCancel(ctx, buffer, body) bodyReaderAt = bytes.NewReader(buffer.Bytes()) bodySize = int64(buffer.Len()) } archive, err := zip.NewReader(bodyReaderAt, bodySize) if err != nil { return errors.Annotatef(err, "Read the zip file") } links := []*link{} // We make the first pass creating the directory structure, or we could end up // attempting to create a file where there's no folder for _, header := range archive.File { select { case <-ctx.Done(): return errors.New("interrupted") default: } path := header.Name // Replace backslash with forward slash. There are archives in the wild made with // buggy compressors that use backslash as path separator. The ZIP format explicitly // denies the use of "\" so we just replace it with slash "/". // Moreover it seems that folders are stored as "files" but with a final "\" in the // filename... oh, well... forceDir := strings.HasSuffix(path, "\\") path = strings.Replace(path, "\\", "/", -1) if rename != nil { path = rename(path) } if path == "" { continue } if path, err = safeJoin(location, path); err != nil { continue } info := header.FileInfo() switch { case info.IsDir() || forceDir: dirMode := info.Mode() | os.ModeDir | 0100 if _, err := e.FS.Stat(path); err == nil { // directory already created, update permissions if err := e.FS.Chmod(path, dirMode); err != nil { return errors.Annotatef(err, "Set permissions %s", path) } } else if err := e.FS.MkdirAll(path, dirMode); err != nil { return errors.Annotatef(err, "Create directory %s", path) } // We only check for symlinks because hard links aren't possible case info.Mode()&os.ModeSymlink != 0: if f, err := header.Open(); err != nil { return errors.Annotatef(err, "Open link %s", path) } else if name, err := io.ReadAll(f); err != nil { return errors.Annotatef(err, "Read address of link %s", path) } else { links = append(links, &link{Path: path, Name: string(name)}) f.Close() } default: if f, err := header.Open(); err != nil { return errors.Annotatef(err, "Open file %s", path) } else if err := e.copy(ctx, path, info.Mode(), f); err != nil { return errors.Annotatef(err, "Create file %s", path) } else { f.Close() } } } if err := e.extractSymlinks(ctx, links); err != nil { return err } return nil } func (e *Extractor) copy(ctx context.Context, path string, mode os.FileMode, src io.Reader) error { // We add the execution permission to be able to create files inside it err := e.FS.MkdirAll(filepath.Dir(path), mode|os.ModeDir|0100) if err != nil { return err } _ = e.FS.Remove(path) file, err := e.FS.OpenFile(path, os.O_CREATE|os.O_TRUNC|os.O_WRONLY, mode) if err != nil { return err } defer file.Close() _, err = copyCancel(ctx, file, src) return err } // match reads the first 512 bytes, calls types.Match and returns a reader // for the whole stream func match(r io.Reader) (io.Reader, types.Type, error) { buffer := make([]byte, 512) n, err := r.Read(buffer) if err != nil && err != io.EOF { return nil, types.Unknown, err } if seeker, ok := r.(io.Seeker); ok { // if the stream is seekable, we just rewind it if _, err := seeker.Seek(0, io.SeekStart); err != nil { return nil, types.Unknown, err } } else { // otherwise we create a new reader that will prepend the buffer r = io.MultiReader(bytes.NewBuffer(buffer[:n]), r) } typ, err := filetype.Match(buffer) return r, typ, err } // safeJoin performs a filepath.Join of 'parent' and 'subdir' but returns an error // if the resulting path points outside of 'parent'. 
func safeJoin(parent, subdir string) (string, error) { res := filepath.Join(parent, subdir) if !strings.HasSuffix(parent, string(os.PathSeparator)) { parent += string(os.PathSeparator) } if !strings.HasPrefix(res, parent) { return res, errors.Errorf("unsafe path join: '%s' with '%s'", parent, subdir) } return res, nil } extract-4.0.0/extractor_test.go000066400000000000000000000270061465515012500165770ustar00rootroot00000000000000package extract_test import ( "archive/tar" "archive/zip" "bytes" "context" "fmt" "os" "path/filepath" "runtime" "strconv" "strings" "testing" "github.com/arduino/go-paths-helper" "github.com/codeclysm/extract/v4" "github.com/stretchr/testify/require" ) func TestExtractors(t *testing.T) { type archiveTest struct { name string file *paths.Path } testCases := []archiveTest{ {"TarGz", paths.New("testdata/archive.tar.gz")}, {"TarBz2", paths.New("testdata/archive.tar.bz2")}, {"TarXz", paths.New("testdata/archive.tar.xz")}, {"TarZstd", paths.New("testdata/archive.tar.zst")}, {"Zip", paths.New("testdata/archive.zip")}, } for _, test := range testCases { t.Run(test.name, func(t *testing.T) { testArchive(t, test.file) }) } } func testArchive(t *testing.T, archivePath *paths.Path) { tmp, err := paths.MkTempDir("", "") require.NoError(t, err) defer tmp.RemoveAll() data, err := archivePath.ReadFile() require.NoError(t, err) buffer := bytes.NewBuffer(data) extractor := extract.Extractor{ FS: MockDisk{ Base: tmp.String(), }, } err = extractor.Archive(context.Background(), buffer, "/", nil) require.NoError(t, err) files := Files{ "": "dir", "/archive": "dir", "/archive/folder": "dir", "/archive/folderlink": "link", "/archive/folder/file1.txt": "folder/File1", "/archive/file1.txt": "File1", "/archive/file2.txt": "File2", "/archive/link.txt": "File1", } testWalk(t, tmp.String(), files) } func TestZipSlipHardening(t *testing.T) { t.Run("ZipTraversal", func(t *testing.T) { logger := &LoggingFS{} extractor := extract.Extractor{FS: logger} data, err := os.Open("testdata/zipslip/evil.zip") require.NoError(t, err) require.NoError(t, extractor.Zip(context.Background(), data, "/tmp/test", nil)) require.NoError(t, data.Close()) fmt.Print(logger) require.Empty(t, logger.Journal) }) t.Run("TarTraversal", func(t *testing.T) { logger := &LoggingFS{} extractor := extract.Extractor{FS: logger} data, err := os.Open("testdata/zipslip/evil.tar") require.NoError(t, err) require.NoError(t, extractor.Tar(context.Background(), data, "/tmp/test", nil)) require.NoError(t, data.Close()) fmt.Print(logger) require.Empty(t, logger.Journal) }) t.Run("TarLinkTraversal", func(t *testing.T) { logger := &LoggingFS{} extractor := extract.Extractor{FS: logger} data, err := os.Open("testdata/zipslip/evil-link-traversal.tar") require.NoError(t, err) require.NoError(t, extractor.Tar(context.Background(), data, "/tmp/test", nil)) require.NoError(t, data.Close()) fmt.Print(logger) require.Empty(t, logger.Journal) }) t.Run("WindowsTarTraversal", func(t *testing.T) { if runtime.GOOS != "windows" { t.Skip("Skipped on non-Windows host") } logger := &LoggingFS{} extractor := extract.Extractor{FS: logger} data, err := os.Open("testdata/zipslip/evil-win.tar") require.NoError(t, err) require.NoError(t, extractor.Tar(context.Background(), data, "/tmp/test", nil)) require.NoError(t, data.Close()) fmt.Print(logger) require.Empty(t, logger.Journal) }) } func mkTempDir(t *testing.T) *paths.Path { tmp, err := paths.MkTempDir("", "test") require.NoError(t, err) t.Cleanup(func() { tmp.RemoveAll() }) return tmp } func 
TestSymLinkMazeHardening(t *testing.T) { addTarSymlink := func(t *testing.T, tw *tar.Writer, new, old string) { err := tw.WriteHeader(&tar.Header{ Mode: 0o0777, Typeflag: tar.TypeSymlink, Name: new, Linkname: old, }) require.NoError(t, err) } addZipSymlink := func(t *testing.T, zw *zip.Writer, new, old string) { h := &zip.FileHeader{Name: new, Method: zip.Deflate} h.SetMode(os.ModeSymlink) w, err := zw.CreateHeader(h) require.NoError(t, err) _, err = w.Write([]byte(old)) require.NoError(t, err) } t.Run("TarWithSymlinkToAbsPath", func(t *testing.T) { // Create target dir tmp := mkTempDir(t) targetDir := tmp.Join("test") require.NoError(t, targetDir.Mkdir()) // Make a tar archive with symlink maze outputTar := bytes.NewBuffer(nil) tw := tar.NewWriter(outputTar) addTarSymlink(t, tw, "aaa", tmp.String()) addTarSymlink(t, tw, "aaa/sym", "something") require.NoError(t, tw.Close()) // Run extract extractor := extract.Extractor{FS: &LoggingFS{}} require.Error(t, extractor.Tar(context.Background(), outputTar, targetDir.String(), nil)) require.NoFileExists(t, tmp.Join("sym").String()) }) t.Run("ZipWithSymlinkToAbsPath", func(t *testing.T) { // Create target dir tmp := mkTempDir(t) targetDir := tmp.Join("test") require.NoError(t, targetDir.Mkdir()) // Make a zip archive with symlink maze outputZip := bytes.NewBuffer(nil) zw := zip.NewWriter(outputZip) addZipSymlink(t, zw, "aaa", tmp.String()) addZipSymlink(t, zw, "aaa/sym", "something") require.NoError(t, zw.Close()) // Run extract extractor := extract.Extractor{FS: &LoggingFS{}} err := extractor.Zip(context.Background(), outputZip, targetDir.String(), nil) require.NoFileExists(t, tmp.Join("sym").String()) require.Error(t, err) }) t.Run("TarWithSymlinkToRelativeExternalPath", func(t *testing.T) { // Create target dir tmp := mkTempDir(t) targetDir := tmp.Join("test") require.NoError(t, targetDir.Mkdir()) checkDir := tmp.Join("secret") require.NoError(t, checkDir.MkdirAll()) // Make a tar archive with regular symlink maze outputTar := bytes.NewBuffer(nil) tw := tar.NewWriter(outputTar) addTarSymlink(t, tw, "aaa", "../secret") addTarSymlink(t, tw, "aaa/sym", "something") require.NoError(t, tw.Close()) extractor := extract.Extractor{FS: &LoggingFS{}} require.Error(t, extractor.Tar(context.Background(), outputTar, targetDir.String(), nil)) require.NoFileExists(t, checkDir.Join("sym").String()) }) t.Run("TarWithSymlinkToInternalPath", func(t *testing.T) { // Create target dir tmp := mkTempDir(t) targetDir := tmp.Join("test") require.NoError(t, targetDir.Mkdir()) // Make a tar archive with regular symlink maze outputTar := bytes.NewBuffer(nil) tw := tar.NewWriter(outputTar) require.NoError(t, tw.WriteHeader(&tar.Header{Mode: 0o0777, Typeflag: tar.TypeDir, Name: "tmp"})) addTarSymlink(t, tw, "aaa", "tmp") addTarSymlink(t, tw, "aaa/sym", "something") require.NoError(t, tw.Close()) extractor := extract.Extractor{FS: &LoggingFS{}} require.Error(t, extractor.Tar(context.Background(), outputTar, targetDir.String(), nil)) require.NoFileExists(t, targetDir.Join("tmp", "sym").String()) }) t.Run("TarWithDoubleSymlinkToExternalPath", func(t *testing.T) { // Create target dir tmp := mkTempDir(t) targetDir := tmp.Join("test") require.NoError(t, targetDir.Mkdir()) fmt.Println("TMP:", tmp) fmt.Println("TARGET DIR:", targetDir) // Make a tar archive with regular symlink maze outputTar := bytes.NewBuffer(nil) tw := tar.NewWriter(outputTar) tw.WriteHeader(&tar.Header{Name: "fake", Mode: 0777, Typeflag: tar.TypeDir}) addTarSymlink(t, tw, "sym-maze", tmp.String()) 
addTarSymlink(t, tw, "sym-maze", "fake") addTarSymlink(t, tw, "sym-maze/oops", "/tmp/something") require.NoError(t, tw.Close()) extractor := extract.Extractor{FS: &LoggingFS{}} require.Error(t, extractor.Tar(context.Background(), outputTar, targetDir.String(), nil)) require.NoFileExists(t, tmp.Join("oops").String()) }) t.Run("TarWithSymlinkToExternalPathWithoutMazing", func(t *testing.T) { // Create target dir tmp := mkTempDir(t) targetDir := tmp.Join("test") require.NoError(t, targetDir.Mkdir()) // Make a tar archive with valid symlink maze outputTar := bytes.NewBuffer(nil) tw := tar.NewWriter(outputTar) require.NoError(t, tw.WriteHeader(&tar.Header{Mode: 0o0777, Typeflag: tar.TypeDir, Name: "tmp"})) addTarSymlink(t, tw, "aaa", "../tmp") require.NoError(t, tw.Close()) extractor := extract.Extractor{FS: &LoggingFS{}} require.NoError(t, extractor.Tar(context.Background(), outputTar, targetDir.String(), nil)) st, err := targetDir.Join("aaa").Lstat() require.NoError(t, err) require.Equal(t, "aaa", st.Name()) }) } func TestUnixPermissions(t *testing.T) { // Disable user's umask to enable creation of files with any permission, restore it after the test userUmask := UnixUmaskZero() defer UnixUmask(userUmask) archiveFilenames := []string{ "testdata/permissions.zip", "testdata/permissions.tar", } for _, archiveFilename := range archiveFilenames { tmp, err := paths.MkTempDir("", "") require.NoError(t, err) defer tmp.RemoveAll() f, err := paths.New(archiveFilename).Open() require.NoError(t, err) err = extract.Archive(context.Background(), f, tmp.String(), nil) require.NoError(t, err) filepath.Walk(tmp.String(), func(path string, info os.FileInfo, _ error) error { filename := filepath.Base(path) // Desired permissions indicated by part of the filenames inside the zip/tar files if strings.HasPrefix(filename, "dir") { desiredPermString := strings.Split(filename, "dir")[1] desiredPerms, _ := strconv.ParseUint(desiredPermString, 8, 32) require.Equal(t, os.ModeDir|os.FileMode(OsDirPerms(desiredPerms)), info.Mode()) } else if strings.HasPrefix(filename, "file") { desiredPermString := strings.Split(filename, "file")[1] desiredPerms, _ := strconv.ParseUint(desiredPermString, 8, 32) require.Equal(t, os.FileMode(OsFilePerms(desiredPerms)), info.Mode()) } return nil }) } } func TestZipDirectoryPermissions(t *testing.T) { // Disable user's umask to enable creation of files with any permission, restore it after the test userUmask := UnixUmaskZero() defer UnixUmask(userUmask) // This arduino library has files before their containing directories in the zip, // so a good test case that these directory permissions are created correctly archive := paths.New("testdata/filesbeforedirectories.zip") download(t, "https://downloads.arduino.cc/libraries/github.com/arduino-libraries/LiquidCrystal-1.0.7.zip", archive) tmp, err := paths.MkTempDir("", "") require.NoError(t, err) defer tmp.RemoveAll() f, err := archive.Open() require.NoError(t, err) err = extract.Archive(context.Background(), f, tmp.String(), nil) require.NoError(t, err) filepath.Walk(tmp.String(), func(path string, info os.FileInfo, _ error) error { // Test files and directories (excluding the parent) match permissions from the zip file if path != tmp.String() { if info.IsDir() { require.Equal(t, os.ModeDir|os.FileMode(OsDirPerms(0755)), info.Mode()) } else { require.Equal(t, os.FileMode(OsFilePerms(0644)), info.Mode()) } } return nil }) } // MockDisk is a disk that chroots to a directory type MockDisk struct { Base string } func (m MockDisk) Link(oldname, 
newname string) error { oldname = filepath.Join(m.Base, oldname) newname = filepath.Join(m.Base, newname) return os.Link(oldname, newname) } func (m MockDisk) MkdirAll(path string, perm os.FileMode) error { path = filepath.Join(m.Base, path) return os.MkdirAll(path, perm) } func (m MockDisk) Symlink(oldname, newname string) error { oldname = filepath.Join(m.Base, oldname) newname = filepath.Join(m.Base, newname) return os.Symlink(oldname, newname) } func (m MockDisk) OpenFile(name string, flag int, perm os.FileMode) (*os.File, error) { name = filepath.Join(m.Base, name) return os.OpenFile(name, flag, perm) } func (m MockDisk) Remove(path string) error { return os.Remove(filepath.Join(m.Base, path)) } func (m MockDisk) Stat(name string) (os.FileInfo, error) { name = filepath.Join(m.Base, name) return os.Stat(name) } func (m MockDisk) Chmod(name string, mode os.FileMode) error { name = filepath.Join(m.Base, name) return os.Chmod(name, mode) } extract-4.0.0/go.mod000066400000000000000000000013111465515012500142730ustar00rootroot00000000000000module github.com/codeclysm/extract/v4 go 1.22 toolchain go1.22.3 require ( github.com/arduino/go-paths-helper v1.12.1 github.com/h2non/filetype v1.1.3 github.com/juju/errors v0.0.0-20181118221551-089d3ea4e4d5 github.com/klauspost/compress v1.15.13 github.com/stretchr/testify v1.9.0 github.com/ulikunitz/xz v0.5.12 golang.org/x/sys v0.16.0 ) require ( github.com/davecgh/go-spew v1.1.1 // indirect github.com/juju/testing v0.0.0-20200510222523-6c8c298c77a0 // indirect github.com/niemeyer/pretty v0.0.0-20200227124842-a10e7caefd8e // indirect github.com/pmezard/go-difflib v1.0.0 // indirect gopkg.in/check.v1 v1.0.0-20200227125254-8fa46927fb4f // indirect gopkg.in/yaml.v3 v3.0.1 // indirect ) extract-4.0.0/go.sum000066400000000000000000000102531465515012500143250ustar00rootroot00000000000000github.com/arduino/go-paths-helper v1.12.1 h1:WkxiVUxBjKWlLMiMuYy8DcmVrkxdP7aKxQOAq7r2lVM= github.com/arduino/go-paths-helper v1.12.1/go.mod h1:jcpW4wr0u69GlXhTYydsdsqAjLaYK5n7oWHfKqOG6LM= github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c= github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= github.com/h2non/filetype v1.1.3 h1:FKkx9QbD7HR/zjK1Ia5XiBsq9zdLi5Kf3zGyFTAFkGg= github.com/h2non/filetype v1.1.3/go.mod h1:319b3zT68BvV+WRj7cwy856M2ehB3HqNOt6sy1HndBY= github.com/juju/clock v0.0.0-20180524022203-d293bb356ca4/go.mod h1:nD0vlnrUjcjJhqN5WuCWZyzfd5AHZAC9/ajvbSx69xA= github.com/juju/errors v0.0.0-20150916125642-1b5e39b83d18/go.mod h1:W54LbzXuIE0boCoNJfwqpmkKJ1O4TCTZMetAt6jGk7Q= github.com/juju/errors v0.0.0-20181118221551-089d3ea4e4d5 h1:rhqTjzJlm7EbkELJDKMTU7udov+Se0xZkWmugr6zGok= github.com/juju/errors v0.0.0-20181118221551-089d3ea4e4d5/go.mod h1:W54LbzXuIE0boCoNJfwqpmkKJ1O4TCTZMetAt6jGk7Q= github.com/juju/loggo v0.0.0-20170605014607-8232ab8918d9 h1:Y+lzErDTURqeXqlqYi4YBYbDd7ycU74gW1ADt57/bgY= github.com/juju/loggo v0.0.0-20170605014607-8232ab8918d9/go.mod h1:vgyd7OREkbtVEN/8IXZe5Ooef3LQePvuBm9UWj6ZL8U= github.com/juju/retry v0.0.0-20160928201858-1998d01ba1c3/go.mod h1:OohPQGsr4pnxwD5YljhQ+TZnuVRYpa5irjugL1Yuif4= github.com/juju/testing v0.0.0-20200510222523-6c8c298c77a0 h1:+WWUkhnTjV6RNOxkcwk79qrjeyHEHvBzlneueBsatX4= github.com/juju/testing v0.0.0-20200510222523-6c8c298c77a0/go.mod h1:hpGvhGHPVbNBraRLZEhoQwFLMrjK8PSlO4D3nDjKYXo= github.com/juju/utils v0.0.0-20180808125547-9dfc6dbfb02b/go.mod h1:6/KLg8Wz/y2KVGWEpkK9vMNGkOnu4k/cqs8Z1fKjTOk= github.com/juju/version v0.0.0-20161031051906-1f41e27e54f2/go.mod 
h1:kE8gK5X0CImdr7qpSKl3xB2PmpySSmfj7zVbkZFs81U= github.com/klauspost/compress v1.15.13 h1:NFn1Wr8cfnenSJSA46lLq4wHCcBzKTSjnBIexDMMOV0= github.com/klauspost/compress v1.15.13/go.mod h1:QPwzmACJjUTFsnSHH934V6woptycfrDDJnH7hvFVbGM= github.com/kr/pty v1.1.1/go.mod h1:pFQYn66WHrOpPYNljwOMqo10TkYh1fy3cYio2l3bCsQ= github.com/kr/text v0.1.0 h1:45sCR5RtlFHMR4UwH9sdQ5TC8v0qDQCHnXt+kaKSTVE= github.com/kr/text v0.1.0/go.mod h1:4Jbv+DJW3UT/LiOwJeYQe1efqtUx/iVham/4vfdArNI= github.com/niemeyer/pretty v0.0.0-20200227124842-a10e7caefd8e h1:fD57ERR4JtEqsWbfPhv4DMiApHyliiK5xCTNVSPiaAs= github.com/niemeyer/pretty v0.0.0-20200227124842-a10e7caefd8e/go.mod h1:zD1mROLANZcx1PVRCS0qkT7pwLkGfwJo4zjcN/Tysno= github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM= github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4= github.com/stretchr/testify v1.9.0 h1:HtqpIVDClZ4nwg75+f6Lvsy/wHu+3BoSGCbBAcpTsTg= github.com/stretchr/testify v1.9.0/go.mod h1:r2ic/lqez/lEtzL7wO/rwa5dbSLXVDPFyf8C91i36aY= github.com/ulikunitz/xz v0.5.12 h1:37Nm15o69RwBkXM0J6A5OlE67RZTfzUxTj8fB3dfcsc= github.com/ulikunitz/xz v0.5.12/go.mod h1:nbz6k7qbPmH4IRqmfOplQw/tblSgqTqBwxkY0oWt/14= golang.org/x/crypto v0.0.0-20180214000028-650f4a345ab4/go.mod h1:6SG95UA2DQfeDnfUPMdvaQW0Q7yPrPDi9nlGo2tz2b4= golang.org/x/net v0.0.0-20180406214816-61147c48b25b/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= golang.org/x/sys v0.16.0 h1:xWw16ngr6ZMtmxDyKyIgsE93KNKz5HKmMa3b8ALHidU= golang.org/x/sys v0.16.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= gopkg.in/check.v1 v1.0.0-20160105164936-4f90aeace3a2/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= gopkg.in/check.v1 v1.0.0-20200227125254-8fa46927fb4f h1:BLraFXnmrev5lT+xlilqcH8XK9/i0At2xKjWk4p6zsU= gopkg.in/check.v1 v1.0.0-20200227125254-8fa46927fb4f/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= gopkg.in/mgo.v2 v2.0.0-20160818015218-f2b6f6c918c4 h1:hILp2hNrRnYjZpmIbx70psAHbBSEcQ1NIzDcUbJ1b6g= gopkg.in/mgo.v2 v2.0.0-20160818015218-f2b6f6c918c4/go.mod h1:yeKp02qBN3iKW1OzL3MGk2IdtZzaj7SFntXj72NppTA= gopkg.in/yaml.v2 v2.0.0-20170712054546-1be3d31502d6 h1:CvAnnm1XvMjfib69SZzDwgWfOk+PxYz0hA0HBupilBA= gopkg.in/yaml.v2 v2.0.0-20170712054546-1be3d31502d6/go.mod h1:JAlM8MvJe8wmxCU4Bli9HhUf9+ttbYbLASfIpnQbh74= gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA= gopkg.in/yaml.v3 v3.0.1/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= extract-4.0.0/loggingfs_test.go000066400000000000000000000057511465515012500165460ustar00rootroot00000000000000package extract_test import ( "fmt" "os" ) // LoggingFS is a disk that logs every operation, useful for unit-testing. type LoggingFS struct { Journal []*LoggedOp } // LoggedOp is an operation logged in a LoggingFS journal. 
type LoggedOp struct { Op string Path string OldPath string Mode os.FileMode Info os.FileInfo Flags int Err error } func (op *LoggedOp) String() string { res := "" switch op.Op { case "link": res += fmt.Sprintf("link %s -> %s", op.Path, op.OldPath) case "symlink": res += fmt.Sprintf("symlink %s -> %s", op.Path, op.OldPath) case "mkdirall": res += fmt.Sprintf("mkdirall %v %s", op.Mode, op.Path) case "open": res += fmt.Sprintf("open %v %s (flags=%04x)", op.Mode, op.Path, op.Flags) case "remove": res += fmt.Sprintf("remove %v", op.Path) case "stat": res += fmt.Sprintf("stat %v -> %v", op.Path, op.Info) case "chmod": res += fmt.Sprintf("chmod %v %s", op.Mode, op.Path) default: panic("unknown LoggedOP " + op.Op) } if op.Err != nil { res += " error: " + op.Err.Error() } else { res += " success" } return res } func (m *LoggingFS) Link(oldname, newname string) error { err := os.Link(oldname, newname) op := &LoggedOp{ Op: "link", OldPath: oldname, Path: newname, Err: err, } m.Journal = append(m.Journal, op) fmt.Println("FS>", op) return err } func (m *LoggingFS) MkdirAll(path string, perm os.FileMode) error { err := os.MkdirAll(path, perm) op := &LoggedOp{ Op: "mkdirall", Path: path, Mode: perm, Err: err, } m.Journal = append(m.Journal, op) fmt.Println("FS>", op) return err } func (m *LoggingFS) Symlink(oldname, newname string) error { err := os.Symlink(oldname, newname) op := &LoggedOp{ Op: "symlink", OldPath: oldname, Path: newname, Err: err, } m.Journal = append(m.Journal, op) fmt.Println("FS>", op) return err } func (m *LoggingFS) OpenFile(name string, flags int, perm os.FileMode) (*os.File, error) { f, err := os.OpenFile(name, flags, perm) op := &LoggedOp{ Op: "open", Path: name, Mode: perm, Flags: flags, Err: err, } m.Journal = append(m.Journal, op) fmt.Println("FS>", op) return f, err } func (m *LoggingFS) Remove(path string) error { err := os.Remove(path) op := &LoggedOp{ Op: "remove", Path: path, } m.Journal = append(m.Journal, op) fmt.Println("FS>", op) return err } func (m *LoggingFS) Stat(path string) (os.FileInfo, error) { info, err := os.Stat(path) op := &LoggedOp{ Op: "stat", Path: path, Info: info, Err: err, } m.Journal = append(m.Journal, op) fmt.Println("FS>", op) return info, err } func (m *LoggingFS) Chmod(path string, mode os.FileMode) error { err := os.Chmod(path, mode) op := &LoggedOp{ Op: "chmod", Path: path, Mode: mode, Err: err, } m.Journal = append(m.Journal, op) fmt.Println("FS>", op) return err } func (m *LoggingFS) String() string { res := "" for _, op := range m.Journal { res += op.String() res += "\n" } return res } extract-4.0.0/safejoin_test.go000066400000000000000000000016461465515012500163640ustar00rootroot00000000000000package extract import ( "testing" "github.com/stretchr/testify/require" ) func TestSafeJoin(t *testing.T) { ok := func(parent, subdir string) { _, err := safeJoin(parent, subdir) require.NoError(t, err, "joining '%s' and '%s'", parent, subdir) } ko := func(parent, subdir string) { _, err := safeJoin(parent, subdir) require.Error(t, err, "joining '%s' and '%s'", parent, subdir) } ok("/", "more/path") ok("/path", "more/path") ok("/path/", "more/path") ok("/path/subdir", "more/path") ok("/path/subdir/", "more/path") ok("/", "..") // ! since we are extracting to / is ok-ish to accept ".."? ko("/path", "..") ko("/path/", "..") ko("/path/subdir", "..") ko("/path/subdir/", "..") ok("/", "../pathpath") // ! since we are extracting to / is ok-ish to accept "../pathpath"? 
ko("/path", "../pathpath") ko("/path/", "../pathpath") ko("/path/subdir", "../pathpath") ko("/path/subdir/", "../pathpath") } extract-4.0.0/testdata/000077500000000000000000000000001465515012500150025ustar00rootroot00000000000000extract-4.0.0/testdata/.gitignore000066400000000000000000000000561465515012500167730ustar00rootroot00000000000000big.tar.gz big.zip filesbeforedirectories.zip extract-4.0.0/testdata/archive-with-backslashes.zip000066400000000000000000000006231465515012500224020ustar00rootroot00000000000000PKa?N/AZ3166\cores\arduino\azure-iot-sdk-c\umqtt\src\PK i}N$>&AZ3166\libraries\AzureIoT\keywords.txtUT k\k\ux Azure PKa?N/AZ3166\cores\arduino\azure-iot-sdk-c\umqtt\src\PK i}N$>&MAZ3166\libraries\AzureIoT\keywords.txtUTk\ux PKextract-4.0.0/testdata/archive.mistery000066400000000000000000000004401465515012500200370ustar00rootroot00000000000000BZh91AY&SYߓQNÐ@A@oo@082 F!`d#A (TS{SQ=@ͩEOwˋ% iL/BMd[bI|?LE 70ƇҚᝤ8,,/s(, 8"n-T\VNܥE]kէi6D?gǚ:z.1 Ed}R Z:!7H j extract-4.0.0/testdata/archive.tar.bz2000066400000000000000000000004401465515012500176250ustar00rootroot00000000000000BZh91AY&SYߓQNÐ@A@oo@082 F!`d#A (TS{SQ=@ͩEOwˋ% iL/BMd[bI|?LE 70ƇҚᝤ8,,/s(, 8"n-T\VNܥE]kէi6D?gǚ:z.1 Ed}R Z:!7H j extract-4.0.0/testdata/archive.tar.gz000066400000000000000000000004411465515012500175510ustar00rootroot00000000000000W]0<*'7}XR07耩?=Kɪ?UB&Ḓ9^[X2dJsM:V_zk߿|U,F#1"߼iT og|l¿ ,$˙?Uߴxۀvs{D2ӡblm=Le!g4jmV'U{ iR\eL8 kη@b<]f 8*hڙvruYBW T/86.|rG)YEtxplOOU(vt.իMy;8 ~:`tP;ױgYZextract-4.0.0/testdata/archive.tar.zst000066400000000000000000000004221465515012500177500ustar00rootroot00000000000000(/d'%BI9 D4l`5VMI?$!sCt#f^zi)ԋnS+b*`OKݱw DXH$iXC}ws۶ܵm3B5_P!qA Bt B0pÁB3P@`d@wy?g@a Y.  Pؼ7 Z]m @F6y L  $U32extract-4.0.0/testdata/archive.zip000066400000000000000000000022511465515012500171470ustar00rootroot00000000000000PK YMIarchive/UT W'Xux PK /XOJarchive/folderlinkUT &X'Xux archive/folderPK 5dI2archive/link.txtUT ɾWɾWux File1 PK MdIarchive/file2.txtUT ɾWɾWux File2 PK 5dI2archive/file1.txtUT ɾWɾWux File1 PK WdIarchive/folder/UT ɾW&Xux PK WdIoXU archive/folder/file1.txtUT ɾWɾWux folder/File1 PK YMIAarchive/UTWux PK /XOJBarchive/folderlinkUT&Xux PK 5dI2archive/link.txtUTɾWux PK MdIarchive/file2.txtUTɾWux PK 5dI2=archive/file1.txtUTɾWux PK WdIAarchive/folder/UTɾWux PK WdIoXU archive/folder/file1.txtUTɾWux PK]6extract-4.0.0/testdata/permissions.tar000066400000000000000000000240001465515012500200610ustar00rootroot00000000000000dir500/0000500000175000017500000000000014553675315011065 5ustar chrischrisdir700/0000700000175000017500000000000014553675265011075 5ustar chrischrisdir750/0000750000175000017500000000000014553675263011105 5ustar chrischrisdir755/0000755000175000017500000000000014553675257011122 5ustar chrischrisdir775/0000775000175000017500000000000014553675256011125 5ustar chrischrisdir777/0000777000175000017500000000000014553675252011125 5ustar chrischrisfile4000000400000175000017500000000000014553675317011142 0ustar chrischrisfile6000000600000175000017500000000000014553675311011140 0ustar chrischrisfile6400000640000175000017500000000000014553675305011153 0ustar chrischrisfile6440000644000175000017500000000000014553675277011173 0ustar chrischrisfile6640000664000175000017500000000000014553675275011175 0ustar chrischrisfile6660000666000175000017500000000000014553675274011200 0ustar chrischrisextract-4.0.0/testdata/permissions.zip000066400000000000000000000032761465515012500201110ustar00rootroot00000000000000PK D7Xdir500/UT ze{eux PK D7Xdir700/UT zeC{eux PK D7Xdir750/UT zeC{eux PK D7Xdir755/UT zeC{eux PK D7Xdir775/UT zeC{eux PK 
D7Xdir777/UT zeC{eux PK D7Xfile400UT zezeux PK D7Xfile600UT zezeux PK D7Xfile640UT zezeux PK D7Xfile644UT zezeux PK D7Xfile664UT zezeux PK D7Xfile666UT zezeux PK D7X@Adir500/UTzeux PK D7XAAdir700/UTzeux PK D7XAdir750/UTzeux PK D7XAdir755/UTzeux PK D7XAdir775/UTzeux PK D7XAEdir777/UTzeux PK D7Xfile400UTzeux PK D7Xfile600UTzeux PK D7Xfile640UTzeux PK D7XIfile644UTzeux PK D7Xfile664UTzeux PK D7Xfile666UTzeux PK  extract-4.0.0/testdata/singlefile.bz2000066400000000000000000000000601465515012500175360ustar00rootroot00000000000000BZh91AY&SY\)A "z0 ;rE8P\)extract-4.0.0/testdata/singlefile.gz000066400000000000000000000000521465515012500174620ustar00rootroot00000000000000=Ysinglefile+KIMIے extract-4.0.0/testdata/zipslip/000077500000000000000000000000001465515012500164745ustar00rootroot00000000000000extract-4.0.0/testdata/zipslip/evil-link-traversal.tar000066400000000000000000000030001465515012500230700ustar00rootroot00000000000000leak0000777000000000000000000000000000000000000020640 1../../../../../../../../../../../../../../../tmp/something-importantustar0000000000000000extract-4.0.0/testdata/zipslip/evil-win.tar000066400000000000000000000240001465515012500207320ustar00rootroot00000000000000..\..\..\..\..\..\..\..\..\..\..\..\..\..\..\..\..\..\..\..\tmp\evil.txt0000666000000000000000000000000400000000000020720 0ustar0000000000000000TESTsome\path\..\..\..\..\..\..\..\..\..\..\..\..\..\..\..\..\..\..\..\..\tmp\evil.txt0000666000000000000000000000000400000000000022751 0ustar0000000000000000TEST\..\..\..\..\..\..\..\..\..\..\..\..\..\..\..\..\..\..\..\..\tmp\evil.txt0000666000000000000000000000000400000000000021054 0ustar0000000000000000TEST\some\path\..\..\..\..\..\..\..\..\..\..\..\..\..\..\..\..\..\..\..\..\tmp\evil.txt0000666000000000000000000000000400000000000023105 0ustar0000000000000000TEST..0000666000000000000000000000000400000000000007341 0ustar0000000000000000TEST../../../../../../../../../../../../../../../../../../../../tmp/evil.txt0000666000000000000000000000000400000000000017037 0ustar0000000000000000TESTsome/path/../../../../../../../../../../../../../../../../../../../../tmp/evil.txt0000666000000000000000000000000400000000000020736 0ustar0000000000000000TEST/../../../../../../../../../../../../../../../../../../../../tmp/evil.txt0000666000000000000000000000000400000000000017116 0ustar0000000000000000TEST/some/path/../../../../../../../../../../../../../../../../../../../../tmp/evil.txt0000666000000000000000000000000400000000000021015 0ustar0000000000000000TESTextract-4.0.0/testdata/zipslip/evil.tar000066400000000000000000000140001465515012500201360ustar00rootroot00000000000000..0000666000000000000000000000000400000000000007341 0ustar0000000000000000TEST../../../../../../../../../../../../../../../../../../../../tmp/evil.txt0000666000000000000000000000000400000000000017037 0ustar0000000000000000TESTsome/path/../../../../../../../../../../../../../../../../../../../../tmp/evil.txt0000666000000000000000000000000400000000000020736 0ustar0000000000000000TEST/../../../../../../../../../../../../../../../../../../../../tmp/evil.txt0000666000000000000000000000000400000000000017116 0ustar0000000000000000TEST/some/path/../../../../../../../../../../../../../../../../../../../../tmp/evil.txt0000666000000000000000000000000400000000000021015 0ustar0000000000000000TESTextract-4.0.0/testdata/zipslip/evil.zip000066400000000000000000000042101465515012500201540ustar00rootroot00000000000000PKH..\..\..\..\..\..\..\..\..\..\..\..\..\..\..\..\..\..\..\..\tmp\evil.txt q PK 
PKRsome\path\..\..\..\..\..\..\..\..\..\..\..\..\..\..\..\..\..\..\..\..\tmp\evil.txt q PK PKI\..\..\..\..\..\..\..\..\..\..\..\..\..\..\..\..\..\..\..\..\tmp\evil.txt q PK PKS\some\path\..\..\..\..\..\..\..\..\..\..\..\..\..\..\..\..\..\..\..\..\tmp\evil.txt q PK PK.. q PK PKH../../../../../../../../../../../../../../../../../../../../tmp/evil.txt q PK PKRsome/path/../../../../../../../../../../../../../../../../../../../../tmp/evil.txt q PK PKI/../../../../../../../../../../../../../../../../../../../../tmp/evil.txt q PK PKS/some/path/../../../../../../../../../../../../../../../../../../../../tmp/evil.txt q PK PK H..\..\..\..\..\..\..\..\..\..\..\..\..\..\..\..\..\..\..\..\tmp\evil.txtPK Rsome\path\..\..\..\..\..\..\..\..\..\..\..\..\..\..\..\..\..\..\..\..\tmp\evil.txtPK I \..\..\..\..\..\..\..\..\..\..\..\..\..\..\..\..\..\..\..\..\tmp\evil.txtPK S\some\path\..\..\..\..\..\..\..\..\..\..\..\..\..\..\..\..\..\..\..\..\tmp\evil.txtPK ..PK HP../../../../../../../../../../../../../../../../../../../../tmp/evil.txtPK Rsome/path/../../../../../../../../../../../../../../../../../../../../tmp/evil.txtPK IZ/../../../../../../../../../../../../../../../../../../../../tmp/evil.txtPK S/some/path/../../../../../../../../../../../../../../../../../../../../tmp/evil.txtPK fextract-4.0.0/umask_unix_test.go000066400000000000000000000004711465515012500167440ustar00rootroot00000000000000//go:build !windows package extract_test import "golang.org/x/sys/unix" func UnixUmaskZero() int { return unix.Umask(0) } func UnixUmask(userUmask int) { unix.Umask(userUmask) } func OsFilePerms(unixPerms uint64) uint64 { return unixPerms } func OsDirPerms(unixPerms uint64) uint64 { return unixPerms } extract-4.0.0/umask_windows_test.go000066400000000000000000000007011465515012500174470ustar00rootroot00000000000000//go:build windows package extract_test func UnixUmaskZero() int { return 0 } func UnixUmask(userUmask int) { } func OsFilePerms(unixPerms uint64) uint64 { // Go on Windows just uses 666/444 for files depending on whether "read only" is set globalPerms := unixPerms >> 6 return globalPerms | (globalPerms << 3) | (globalPerms << 6) } func OsDirPerms(unixPerms uint64) uint64 { // Go on Windows just uses 777 for directories return 0777 }
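
The test doubles and helpers above lend themselves to a short usage illustration. The sketch below is hypothetical and not part of the repository: it wires the `LoggingFS` journal into an `Extractor`, and it combines the umask helpers with `testdata/permissions.tar` to check extracted permission bits. `LoggingFS`, `UnixUmaskZero`, `UnixUmask`, `OsFilePerms`, and the package's `Archive`/`Extractor` entry points come from the sources above; the test names, the chosen archive entries, and the exact assertions are assumptions.

```go
package extract_test

import (
	"context"
	"os"
	"path/filepath"
	"testing"

	"github.com/codeclysm/extract/v4"
	"github.com/stretchr/testify/require"
)

// TestLoggingFSSketch (hypothetical) records every MkdirAll/OpenFile/Chmod/...
// performed while extracting testdata/archive.zip, so the journal can be
// inspected or printed after the fact.
func TestLoggingFSSketch(t *testing.T) {
	file, err := os.Open(filepath.Join("testdata", "archive.zip"))
	require.NoError(t, err)
	defer file.Close()

	fs := &LoggingFS{}
	extractor := extract.Extractor{FS: fs}
	require.NoError(t, extractor.Archive(context.Background(), file, t.TempDir(), nil))

	require.NotEmpty(t, fs.Journal)
	t.Log(fs.String())
}

// TestPermissionsSketch (hypothetical) clears the umask so the modes stored in
// testdata/permissions.tar are applied verbatim, then checks one entry,
// widening the expectation on Windows through OsFilePerms.
func TestPermissionsSketch(t *testing.T) {
	previous := UnixUmaskZero()
	defer UnixUmask(previous)

	file, err := os.Open(filepath.Join("testdata", "permissions.tar"))
	require.NoError(t, err)
	defer file.Close()

	dir := t.TempDir()
	require.NoError(t, extract.Archive(context.Background(), file, dir, nil))

	info, err := os.Stat(filepath.Join(dir, "file644"))
	require.NoError(t, err)
	require.Equal(t, os.FileMode(OsFilePerms(0644)), info.Mode().Perm())
}
```

On Unix the cleared umask means the archived modes are expected verbatim; on Windows the expectation is widened through `OsFilePerms`, since Go there only distinguishes read-only files, as noted in the umask helpers above.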