pax_global_header00006660000000000000000000000064144122566770014530gustar00rootroot0000000000000052 comment=e95f5c8e97d528449ccb2ebeb5e5266954eeb549 go-clone-1.6.0/000077500000000000000000000000001441225667700132375ustar00rootroot00000000000000go-clone-1.6.0/.github/000077500000000000000000000000001441225667700145775ustar00rootroot00000000000000go-clone-1.6.0/.github/workflows/000077500000000000000000000000001441225667700166345ustar00rootroot00000000000000go-clone-1.6.0/.github/workflows/go.yml000066400000000000000000000015561441225667700177730ustar00rootroot00000000000000name: Go on: push: branches: [master] pull_request: branches: [master] jobs: build: name: Build runs-on: ubuntu-latest steps: - name: Set up Go 1.x uses: actions/setup-go@v2 with: go-version: ^1.13 - name: Check out code into the Go module directory uses: actions/checkout@v2 - name: Get dependencies run: | go mod download go get - name: Test run: go test -v -coverprofile=covprofile.cov ./... - name: Test generic run: | cd generic go test -v ./... cd .. 
- name: Send coverage env: COVERALLS_TOKEN: ${{ secrets.GITHUB_TOKEN }} run: | go get github.com/mattn/goveralls go run github.com/mattn/goveralls -coverprofile=covprofile.cov -service=github go-clone-1.6.0/.gitignore000066400000000000000000000005751441225667700152360ustar00rootroot00000000000000# Compiled Object files, Static and Dynamic libs (Shared Objects) *.o *.a *.so # Folders _obj _test # Architecture specific extensions/prefixes *.[568vq] [568vq].out *.cgo1.go *.cgo2.c _cgo_defun.c _cgo_gotypes.go _cgo_export.* _testmain.go *.exe *.test *.prof # Intellij *.iml .idea/ # VS Code debug debug_test .vscode/ # Mac .DS_Store # go workspace go.work go.work.sum go-clone-1.6.0/LICENSE000066400000000000000000000020561441225667700142470ustar00rootroot00000000000000Copyright (c) 2019 Huan Du Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
go-clone-1.6.0/README.md000066400000000000000000000266111441225667700145240ustar00rootroot00000000000000# go-clone: Clone any Go data structure deeply and thoroughly [![Go](https://github.com/huandu/go-clone/workflows/Go/badge.svg)](https://github.com/huandu/go-clone/actions) [![Go Doc](https://godoc.org/github.com/huandu/go-clone?status.svg)](https://pkg.go.dev/github.com/huandu/go-clone) [![Go Report](https://goreportcard.com/badge/github.com/huandu/go-clone)](https://goreportcard.com/report/github.com/huandu/go-clone) [![Coverage Status](https://coveralls.io/repos/github/huandu/go-clone/badge.svg?branch=master)](https://coveralls.io/github/huandu/go-clone?branch=master) Package `clone` provides functions to deep clone any Go data. It also provides a wrapper to protect a pointer from any unexpected mutation. For users who use Go 1.18+, it's recommended to import `github.com/huandu/go-clone/generic` for generic APIs and arena support. `Clone`/`Slowly` can clone unexported fields and "no-copy" structs as well. Use this feature wisely. ## Install Use `go get` to install this package. ```shell go get github.com/huandu/go-clone ``` ## Usage ### `Clone` and `Slowly` If we want to clone any Go value, use `Clone`. ```go t := &T{...} v := clone.Clone(t).(*T) reflect.DeepEqual(t, v) // true ``` For the sake of performance, `Clone` doesn't deal with values containing pointer cycles. If we need to clone such values, use `Slowly` instead. ```go type ListNode struct { Data int Next *ListNode } node1 := &ListNode{ Data: 1, } node2 := &ListNode{ Data: 2, } node3 := &ListNode{ Data: 3, } node1.Next = node2 node2.Next = node3 node3.Next = node1 // We must use `Slowly` to clone a circular linked list. node := Slowly(node1).(*ListNode) for i := 0; i < 10; i++ { fmt.Println(node.Data) node = node.Next } ``` ### Generic APIs Starting from go1.18, Go started to support generic. With generic syntax, `Clone`/`Slowly` and other APIs can be called much cleaner like following. 
```go import "github.com/huandu/go-clone/generic" type MyType struct { Foo string } original := &MyType{ Foo: "bar", } // The type of cloned is *MyType instead of interface{}. cloned := Clone(original) println(cloned.Foo) // Output: bar ``` It's required to update minimal Go version to 1.18 to opt-in generic syntax. It may not be a wise choice to update this package's `go.mod` and drop so many old Go compilers for such syntax candy. Therefore, I decide to create a new standalone package `github.com/huandu/go-clone/generic` to provide APIs with generic syntax. For new users who use Go 1.18+, the generic package is preferred and recommended. ### Arena support Starting from Go1.20, arena is introduced as a new way to allocate memory. It's quite useful to improve overall performance in special scenarios. In order to clone a value with memory allocated from an arena, there are new methods `ArenaClone` and `ArenaCloneSlowly` available in `github.com/huandu/go-clone/generic`. ```go // ArenaClone recursively deep clones v to a new value in arena a. // It works in the same way as Clone, except it allocates all memory from arena. func ArenaClone[T any](a *arena.Arena, v T) (nv T) // ArenaCloneSlowly recursively deep clones v to a new value in arena a. // It works in the same way as Slowly, except it allocates all memory from arena. func ArenaCloneSlowly[T any](a *arena.Arena, v T) (nv T) ``` Due to limitations in arena API, memory of the internal data structure of `map` and `chan` is always allocated in heap by Go runtime ([see this issue](https://github.com/golang/go/issues/56230)). **Warning**: Per [discussion in the arena proposal](https://github.com/golang/go/issues/51317), the arena package may be changed incompatibly or removed in future. All arena related APIs in this package will be changed accordingly. ### Memory allocations and the `Allocator` The `Allocator` is designed to allocate memory when cloning. It's also used to hold all customizations, e.g. 
custom clone functions, scalar types and opaque pointers, etc. There is a default allocator which allocates memory from heap. Almost all public APIs in this package use this default allocator to do their job. We can control how to allocate memory by creating a new `Allocator` by `NewAllocator`. It enables us to take full control over memory allocation when cloning. See [Allocator sample code](https://pkg.go.dev/github.com/huandu/go-clone#example-Allocator) to understand how to customize an allocator. Let's take a closer look at the `NewAllocator` function. ```go func NewAllocator(pool unsafe.Pointer, methods *AllocatorMethods) *Allocator ``` - The first parameter `pool` is a pointer to a memory pool. It's used to allocate memory for cloning. It can be `nil` if we don't need a memory pool. - The second parameter `methods` is a pointer to a struct which contains all methods to allocate memory. It can be `nil` if we don't need to customize memory allocation. - The `Allocator` struct is allocated from the `methods.New` or the `methods.Parent` allocator or from heap. The `Parent` in `AllocatorMethods` is used to indicate the parent of the new allocator. With this feature, we can orgnize allocators into a tree structure. All customizations, including custom clone functions, scalar types and opaque pointers, etc, are inherited from parent allocators. There are some APIs designed for convenience. - We can create dedicated allocators for heap or arena by calling `FromHeap()` or `FromArena(a *arena.Arena)`. - We can call `MakeCloner(allocator)` to create a helper struct with `Clone` and `CloneSlowly` methods in which the type of in and out parameters is `interface{}`. ### Mark struct type as scalar Some struct types can be considered as scalar. A well-known case is `time.Time`. Although there is a pointer `loc *time.Location` inside `time.Time`, we always use `time.Time` by value in all methods. When cloning `time.Time`, it should be OK to return a shadow copy. 
Currently, following types are marked as scalar by default. - `time.Time` - `reflect.Value` If there is any type defined in built-in package should be considered as scalar, please open new issue to let me know. I will update the default. If there is any custom type should be considered as scalar, call `MarkAsScalar` to mark it manually. See [MarkAsScalar sample code](https://pkg.go.dev/github.com/huandu/go-clone#example-MarkAsScalar) for more details. ### Mark pointer type as opaque Some pointer values are used as enumerable const values. A well-known case is `elliptic.Curve`. In package `crypto/tls`, curve type of a certificate is checked by comparing values to pre-defined curve values, e.g. `elliptic.P521()`. In this case, the curve values, which are pointers or structs, cannot be cloned deeply. Currently, following types are marked as scalar by default. - `elliptic.Curve`, which is `*elliptic.CurveParam` or `elliptic.p256Curve`. - `reflect.Type`, which is `*reflect.rtype` defined in `runtime`. If there is any pointer type defined in built-in package should be considered as opaque, please open new issue to let me know. I will update the default. If there is any custom pointer type should be considered as opaque, call `MarkAsOpaquePointer` to mark it manually. See [MarkAsOpaquePointer sample code](https://pkg.go.dev/github.com/huandu/go-clone#example-MarkAsOpaquePointer) for more details. ### Clone "no-copy" types defined in `sync` and `sync/atomic` There are some "no-copy" types like `sync.Mutex`, `atomic.Value`, etc. They cannot be cloned by copying all fields one by one, but we can alloc a new zero value and call methods to do proper initialization. Currently, all "no-copy" types defined in `sync` and `sync/atomic` can be cloned properly using following strategies. - `sync.Mutex`: Cloned value is a newly allocated zero mutex. - `sync.RWMutex`: Cloned value is a newly allocated zero mutex. - `sync.WaitGroup`: Cloned value is a newly allocated zero wait group. 
- `sync.Cond`: Cloned value is a cond with a newly allocated zero lock. - `sync.Pool`: Cloned value is an empty pool with the same `New` function. - `sync.Map`: Cloned value is a sync map with cloned key/value pairs. - `sync.Once`: Cloned value is a once type with the same done flag. - `atomic.Value`/`atomic.Bool`/`atomic.Int32`/`atomic.Int64`/`atomic.Uint32`/`atomic.Uint64`/`atomic.Uintptr`: Cloned value is a new atomic value with the same value. If there is any type defined in built-in package should be considered as "no-copy" types, please open new issue to let me know. I will update the default. ### Set custom clone functions If default clone strategy doesn't work for a struct type, we can call `SetCustomFunc` to register a custom clone function. ```go SetCustomFunc(reflect.TypeOf(MyType{}), func(allocator *Allocator, old, new reflect.Value) { // Customized logic to copy the old to the new. // The old's type is MyType. // The new is a zero value of MyType and new.CanAddr() always returns true. }) ``` We can use `allocator` to clone any value or allocate new memory. It's allowed to call `allocator.Clone` or `allocator.CloneSlowly` on `old` to clone its struct fields in depth without worrying about dead loop. See [SetCustomFunc sample code](https://pkg.go.dev/github.com/huandu/go-clone#example-SetCustomFunc) for more details. ### Clone `atomic.Pointer[T]` As there is no way to predefine a custom clone function for generic type `atomic.Pointer[T]`, cloning such atomic type is not supported by default. If we want to support it, we need to register a custom clone function manually. Suppose we instantiate `atomic.Pointer[T]` with type `MyType1` and `MyType2` in a project, and then we can register custom clone functions like following. ```go import "github.com/huandu/go-clone/generic" func init() { // Register all instantiated atomic.Pointer[T] types in this project. 
clone.RegisterAtomicPointer[MyType1]() clone.RegisterAtomicPointer[MyType2]() } ``` ### `Wrap`, `Unwrap` and `Undo` Package `clone` provides `Wrap`/`Unwrap` functions to protect a pointer value from any unexpected mutation. It's useful when we want to protect a variable which should be immutable by design, e.g. global config, the value stored in context, the value sent to a chan, etc. ```go // Suppose we have a type T defined as following. // type T struct { // Foo int // } v := &T{ Foo: 123, } w := Wrap(v).(*T) // Wrap value to protect it. // Use w freely. The type of w is the same as that of v. // It's OK to modify w. The change will not affect v. w.Foo = 456 fmt.Println(w.Foo) // 456 fmt.Println(v.Foo) // 123 // Once we need the original value stored in w, call `Unwrap`. orig := Unwrap(w).(*T) fmt.Println(orig == v) // true fmt.Println(orig.Foo) // 123 // Or, we can simply undo any change made in w. // Note that `Undo` is significantly slower than `Unwrap`, thus // the latter is always preferred. Undo(w) fmt.Println(w.Foo) // 123 ``` ## Performance Here is the performance data running on my dev machine. ```text go 1.20.1 goos: darwin goarch: amd64 cpu: Intel(R) Core(TM) i7-9750H CPU @ 2.60GHz BenchmarkSimpleClone-12 7164530 156.7 ns/op 24 B/op 1 allocs/op BenchmarkComplexClone-12 628056 1871 ns/op 1488 B/op 21 allocs/op BenchmarkUnwrap-12 15498139 78.02 ns/op 0 B/op 0 allocs/op BenchmarkSimpleWrap-12 3882360 309.7 ns/op 72 B/op 2 allocs/op BenchmarkComplexWrap-12 949654 1245 ns/op 736 B/op 15 allocs/op ``` ## License This package is licensed under MIT license. See LICENSE for details. go-clone-1.6.0/allocator.go000066400000000000000000000166741441225667700155640ustar00rootroot00000000000000// Copyright 2023 Huan Du. All rights reserved. // Licensed under the MIT license that can be found in the LICENSE file. 
package clone import ( "reflect" "runtime" "sync" "unsafe" ) var typeOfAllocator = reflect.TypeOf(Allocator{}) // defaultAllocator is the default allocator and allocates memory from heap. var defaultAllocator = &Allocator{ new: heapNew, makeSlice: heapMakeSlice, makeMap: heapMakeMap, makeChan: heapMakeChan, isScalar: IsScalar, } // Allocator is a utility type for memory allocation. type Allocator struct { parent *Allocator pool unsafe.Pointer new func(pool unsafe.Pointer, t reflect.Type) reflect.Value makeSlice func(pool unsafe.Pointer, t reflect.Type, len, cap int) reflect.Value makeMap func(pool unsafe.Pointer, t reflect.Type, n int) reflect.Value makeChan func(pool unsafe.Pointer, t reflect.Type, buffer int) reflect.Value isScalar func(t reflect.Kind) bool cachedStructTypes sync.Map cachedPointerTypes sync.Map cachedCustomFuncTypes sync.Map } // FromHeap creates an allocator which allocate memory from heap. func FromHeap() *Allocator { return NewAllocator(nil, nil) } // NewAllocator creates an allocator which allocate memory from the pool. // Both pool and methods are optional. // // If methods.New is not nil, the allocator itself is created by calling methods.New. // // The pool is a pointer to the memory pool which is opaque to the allocator. // It's methods's responsibility to allocate memory from the pool properly. func NewAllocator(pool unsafe.Pointer, methods *AllocatorMethods) (allocator *Allocator) { parent := methods.parent() new := methods.new(parent, pool) // Allocate the allocator from the pool. 
val := new(pool, typeOfAllocator) allocator = (*Allocator)(unsafe.Pointer(val.Pointer())) runtime.KeepAlive(val) allocator.pool = pool allocator.new = new allocator.makeSlice = methods.makeSlice(parent, pool) allocator.makeMap = methods.makeMap(parent, pool) allocator.makeChan = methods.makeChan(parent, pool) allocator.isScalar = methods.isScalar(parent) if parent == nil { parent = defaultAllocator } allocator.parent = parent return } // New returns a new zero value of t. func (a *Allocator) New(t reflect.Type) reflect.Value { return a.new(a.pool, t) } // MakeSlice creates a new zero-initialized slice value of t with len and cap. func (a *Allocator) MakeSlice(t reflect.Type, len, cap int) reflect.Value { return a.makeSlice(a.pool, t, len, cap) } // MakeMap creates a new map with minimum size n. func (a *Allocator) MakeMap(t reflect.Type, n int) reflect.Value { return a.makeMap(a.pool, t, n) } // MakeChan creates a new chan with buffer. func (a *Allocator) MakeChan(t reflect.Type, buffer int) reflect.Value { return a.makeChan(a.pool, t, buffer) } // Clone recursively deep clone val to a new value with memory allocated from a. func (a *Allocator) Clone(val reflect.Value) reflect.Value { return a.clone(val, true) } func (a *Allocator) clone(val reflect.Value, inCustomFunc bool) reflect.Value { if !val.IsValid() { return val } state := &cloneState{ allocator: a, } if inCustomFunc { state.skipCustomFuncValue = val } return state.clone(val) } // CloneSlowly recursively deep clone val to a new value with memory allocated from a. // It marks all cloned values internally, thus it can clone v with cycle pointer. 
func (a *Allocator) CloneSlowly(val reflect.Value) reflect.Value { return a.cloneSlowly(val, true) } func (a *Allocator) cloneSlowly(val reflect.Value, inCustomFunc bool) reflect.Value { if !val.IsValid() { return val } state := &cloneState{ allocator: a, visited: visitMap{}, invalid: invalidPointers{}, } if inCustomFunc { state.skipCustomFuncValue = val } cloned := state.clone(val) state.fix(cloned) return cloned } func (a *Allocator) loadStructType(t reflect.Type) (st structType) { st, ok := a.lookupStructType(t) if ok { return } num := t.NumField() pointerFields := make([]structFieldType, 0, num) // Find pointer fields in depth-first order. for i := 0; i < num; i++ { field := t.Field(i) ft := field.Type k := ft.Kind() if a.isScalar(k) { continue } switch k { case reflect.Array: if ft.Len() == 0 { continue } elem := ft.Elem() if a.isScalar(elem.Kind()) { continue } if elem.Kind() == reflect.Struct { if fst := a.loadStructType(elem); fst.CanShadowCopy() { continue } } case reflect.Struct: if fst := a.loadStructType(ft); fst.CanShadowCopy() { continue } } pointerFields = append(pointerFields, structFieldType{ Offset: field.Offset, Index: i, }) } if len(pointerFields) == 0 { pointerFields = nil // Release memory ASAP. } st = structType{ PointerFields: pointerFields, } // Load custom function. 
current := a for current != nil { if fn, ok := current.cachedCustomFuncTypes.Load(t); ok { st.fn = fn.(Func) break } current = current.parent } a.cachedStructTypes.LoadOrStore(t, st) return } func (a *Allocator) lookupStructType(t reflect.Type) (st structType, ok bool) { var v interface{} current := a for current != nil { v, ok = current.cachedStructTypes.Load(t) if ok { st = v.(structType) return } current = current.parent } return } func (a *Allocator) isOpaquePointer(t reflect.Type) (ok bool) { current := a for current != nil { if _, ok = current.cachedPointerTypes.Load(t); ok { return } current = current.parent } return } // MarkAsScalar marks t as a scalar type so that all clone methods will copy t by value. // If t is not struct or pointer to struct, MarkAsScalar ignores t. // // In the most cases, it's not necessary to call it explicitly. // If a struct type contains scalar type fields only, the struct will be marked as scalar automatically. // // Here is a list of types marked as scalar by default: // - time.Time // - reflect.Value func (a *Allocator) MarkAsScalar(t reflect.Type) { for t.Kind() == reflect.Ptr { t = t.Elem() } if t.Kind() != reflect.Struct { return } a.cachedStructTypes.Store(t, zeroStructType) } // MarkAsOpaquePointer marks t as an opaque pointer so that all clone methods will copy t by value. // If t is not a pointer, MarkAsOpaquePointer ignores t. // // Here is a list of types marked as opaque pointers by default: // - `elliptic.Curve`, which is `*elliptic.CurveParam` or `elliptic.p256Curve`; // - `reflect.Type`, which is `*reflect.rtype` defined in `runtime`. func (a *Allocator) MarkAsOpaquePointer(t reflect.Type) { if t.Kind() != reflect.Ptr { return } a.cachedPointerTypes.Store(t, struct{}{}) } // SetCustomFunc sets a custom clone function for type t. // If t is not struct or pointer to struct, SetCustomFunc ignores t. // // If fn is nil, remove the custom clone function for type t. 
func (a *Allocator) SetCustomFunc(t reflect.Type, fn Func) { if fn == nil { a.cachedCustomFuncTypes.Delete(t) return } for t.Kind() == reflect.Ptr { t = t.Elem() } if t.Kind() != reflect.Struct { return } a.cachedCustomFuncTypes.Store(t, fn) } func heapNew(pool unsafe.Pointer, t reflect.Type) reflect.Value { return reflect.New(t) } func heapMakeSlice(pool unsafe.Pointer, t reflect.Type, len, cap int) reflect.Value { return reflect.MakeSlice(t, len, cap) } func heapMakeMap(pool unsafe.Pointer, t reflect.Type, n int) reflect.Value { return reflect.MakeMapWithSize(t, n) } func heapMakeChan(pool unsafe.Pointer, t reflect.Type, buffer int) reflect.Value { return reflect.MakeChan(t, buffer) } go-clone-1.6.0/allocator_sample_test.go000066400000000000000000000052111441225667700201450ustar00rootroot00000000000000// Copyright 2023 Huan Du. All rights reserved. // Licensed under the MIT license that can be found in the LICENSE file. //go:build !goexperiment.arenas package clone import ( "fmt" "reflect" "runtime" "sync" "unsafe" ) func ExampleAllocator() { // We can create a new allocator to hold customized config without poluting the default allocator. // Calling FromHeap() is a convenient way to create a new allocator which allocates memory from heap. allocator := FromHeap() // Mark T as scalar only in the allocator. type T struct { Value *int } allocator.MarkAsScalar(reflect.TypeOf(new(T))) t := &T{ Value: new(int), } cloned1 := allocator.Clone(reflect.ValueOf(t)).Interface().(*T) cloned2 := Clone(t).(*T) fmt.Println(t.Value == cloned1.Value) fmt.Println(t.Value == cloned2.Value) // Output: // true // false } func ExampleAllocator_syncPool() { type Foo struct { Bar int } typeOfFoo := reflect.TypeOf(Foo{}) poolUsed := 0 // For test only. // A sync pool to allocate Foo. p := &sync.Pool{ New: func() interface{} { return &Foo{} }, } // Creates a custom allocator using p as pool. 
allocator := NewAllocator(unsafe.Pointer(p), &AllocatorMethods{ New: func(pool unsafe.Pointer, t reflect.Type) reflect.Value { // If t is Foo, allocate value from the sync pool p. if t == typeOfFoo { poolUsed++ // For test only. p := (*sync.Pool)(pool) v := p.Get() runtime.SetFinalizer(v, func(v *Foo) { *v = Foo{} p.Put(v) }) return reflect.ValueOf(v) } // Fallback to reflect API. return reflect.New(t) }, }) // Do clone. target := []*Foo{ {Bar: 1}, {Bar: 2}, } cloned := allocator.Clone(reflect.ValueOf(target)).Interface().([]*Foo) fmt.Println(reflect.DeepEqual(target, cloned)) fmt.Println(poolUsed) // Output: // true // 2 } func ExampleAllocator_deepCloneString() { // By default, string is considered as scalar and copied by value. // In some cases, we may need to clone string deeply, that is, copy the underlying bytes. // We can use a custom allocator to do this. allocator := NewAllocator(nil, &AllocatorMethods{ IsScalar: func(t reflect.Kind) bool { return t != reflect.String && IsScalar(t) }, }) cloner := MakeCloner(allocator) data := []byte("bytes") s1 := *(*string)(unsafe.Pointer(&data)) // Unsafe conversion from []byte to string. s2 := Clone(s1).(string) // s2 shares the same underlying bytes with s1. s3 := cloner.Clone(s1).(string) // s3 has its own underlying bytes. copy(data, "magic") // Change the underlying bytes. fmt.Println(s1) fmt.Println(s2) fmt.Println(s3) // Output: // magic // magic // bytes } go-clone-1.6.0/allocator_test.go000066400000000000000000000033531441225667700166110ustar00rootroot00000000000000// Copyright 2023 Huan Du. All rights reserved. // Licensed under the MIT license that can be found in the LICENSE file. 
package clone import ( "reflect" "testing" "unsafe" "github.com/huandu/go-assert" ) func TestAllocatorClone(t *testing.T) { a := assert.New(t) cnt := 0 allocator := NewAllocator(nil, &AllocatorMethods{ New: func(pool unsafe.Pointer, t reflect.Type) reflect.Value { cnt++ return heapNew(pool, t) }, }) type dataNode struct { Data int Next *dataNode } data := &dataNode{ Data: 1, Next: &dataNode{ Data: 2, }, } cloned := allocator.Clone(reflect.ValueOf(data)).Interface().(*dataNode) a.Equal(data, cloned) // Should allocate following value. // - allocator // - data // - data.Next a.Equal(cnt, 3) } func TestAllocatorCloneSlowly(t *testing.T) { a := assert.New(t) cnt := 0 allocator := NewAllocator(nil, &AllocatorMethods{ New: func(pool unsafe.Pointer, t reflect.Type) reflect.Value { cnt++ return heapNew(pool, t) }, }) type dataNode struct { Data int Next *dataNode } // data is a cycle linked list. data := &dataNode{ Data: 1, Next: &dataNode{ Data: 2, Next: &dataNode{ Data: 3, }, }, } data.Next.Next.Next = data cloned := allocator.CloneSlowly(reflect.ValueOf(data)).Interface().(*dataNode) a.Equal(data.Data, cloned.Data) a.Equal(data.Next.Data, cloned.Next.Data) a.Equal(data.Next.Next.Data, cloned.Next.Next.Data) a.Equal(data.Next.Next.Next.Data, cloned.Next.Next.Next.Data) a.Equal(data.Next.Next.Next.Next.Data, cloned.Next.Next.Next.Next.Data) a.Assert(cloned.Next.Next.Next == cloned) // Should allocate following value. // - allocator // - data // - data.Next // - data.Next.Next a.Equal(cnt, 4) } go-clone-1.6.0/allocatormethods.go000066400000000000000000000056411441225667700171400ustar00rootroot00000000000000// Copyright 2023 Huan Du. All rights reserved. // Licensed under the MIT license that can be found in the LICENSE file. package clone import ( "reflect" "unsafe" ) // AllocatorMethods defines all methods required by allocator. // If any of these methods is nil, allocator will use default method which allocates memory from heap. 
type AllocatorMethods struct { // Parent is the allocator which handles all unhandled methods. // If it's nil, it will be the default allocator. Parent *Allocator New func(pool unsafe.Pointer, t reflect.Type) reflect.Value MakeSlice func(pool unsafe.Pointer, t reflect.Type, len, cap int) reflect.Value MakeMap func(pool unsafe.Pointer, t reflect.Type, n int) reflect.Value MakeChan func(pool unsafe.Pointer, t reflect.Type, buffer int) reflect.Value IsScalar func(k reflect.Kind) bool } func (am *AllocatorMethods) parent() *Allocator { if am != nil && am.Parent != nil { return am.Parent } return nil } func (am *AllocatorMethods) new(parent *Allocator, pool unsafe.Pointer) func(pool unsafe.Pointer, t reflect.Type) reflect.Value { if am != nil && am.New != nil { return am.New } if parent != nil { if parent.pool == pool { return parent.new } else { return func(pool unsafe.Pointer, t reflect.Type) reflect.Value { return parent.New(t) } } } return defaultAllocator.new } func (am *AllocatorMethods) makeSlice(parent *Allocator, pool unsafe.Pointer) func(pool unsafe.Pointer, t reflect.Type, len, cap int) reflect.Value { if am != nil && am.MakeSlice != nil { return am.MakeSlice } if parent != nil { if parent.pool == pool { return parent.makeSlice } else { return func(pool unsafe.Pointer, t reflect.Type, len, cap int) reflect.Value { return parent.MakeSlice(t, len, cap) } } } return defaultAllocator.makeSlice } func (am *AllocatorMethods) makeMap(parent *Allocator, pool unsafe.Pointer) func(pool unsafe.Pointer, t reflect.Type, n int) reflect.Value { if am != nil && am.MakeMap != nil { return am.MakeMap } if parent != nil { if parent.pool == pool { return parent.makeMap } else { return func(pool unsafe.Pointer, t reflect.Type, n int) reflect.Value { return parent.MakeMap(t, n) } } } return defaultAllocator.makeMap } func (am *AllocatorMethods) makeChan(parent *Allocator, pool unsafe.Pointer) func(pool unsafe.Pointer, t reflect.Type, buffer int) reflect.Value { if am != nil && 
am.MakeChan != nil { return am.MakeChan } if parent != nil { if parent.pool == pool { return parent.makeChan } else { return func(pool unsafe.Pointer, t reflect.Type, buffer int) reflect.Value { return parent.MakeChan(t, buffer) } } } return defaultAllocator.makeChan } func (am *AllocatorMethods) isScalar(parent *Allocator) func(t reflect.Kind) bool { if am != nil && am.IsScalar != nil { return am.IsScalar } if parent != nil { return parent.isScalar } return defaultAllocator.isScalar } go-clone-1.6.0/allocatormethods_test.go000066400000000000000000000061471441225667700202010ustar00rootroot00000000000000// Copyright 2023 Huan Du. All rights reserved. // Licensed under the MIT license that can be found in the LICENSE file. package clone import ( "reflect" "sync" "testing" "unsafe" "github.com/huandu/go-assert" ) func TestAllocatorMethodsParent(t *testing.T) { a := assert.New(t) parent := NewAllocator(nil, &AllocatorMethods{ IsScalar: func(k reflect.Kind) bool { return k == reflect.Int }, }) allocator := NewAllocator(nil, &AllocatorMethods{ Parent: parent, }) a.Assert(parent.parent == defaultAllocator) a.Assert(allocator.parent == parent) // Set up customizations in parent. type T1 struct { Data []byte } type T2 struct { Data []byte } type T3 struct { Data []byte } typeOfT1 := reflect.TypeOf(new(T1)) typeOfT2 := reflect.TypeOf(new(T2)) typeOfT3 := reflect.TypeOf(new(T3)) customFuncCalled := 0 parent.MarkAsScalar(typeOfT1) parent.MarkAsOpaquePointer(typeOfT2) parent.SetCustomFunc(typeOfT3, func(allocator *Allocator, old, new reflect.Value) { customFuncCalled++ }) // All customizations should be inherited from parent. 
st1 := allocator.loadStructType(typeOfT1.Elem()) st2 := allocator.loadStructType(typeOfT2.Elem()) st3 := allocator.loadStructType(typeOfT3.Elem()) a.Equal(len(st1.PointerFields), 0) a.Assert(st1.fn == nil) a.Equal(len(st3.PointerFields), 1) a.Assert(st2.fn == nil) a.Equal(len(st3.PointerFields), 1) a.Assert(st3.fn != nil) a.Assert(!allocator.isOpaquePointer(typeOfT1)) a.Assert(allocator.isOpaquePointer(typeOfT2)) a.Assert(!allocator.isOpaquePointer(typeOfT3)) a.Assert(allocator.isScalar(reflect.Int)) a.Assert(!allocator.isScalar(reflect.Uint)) } func TestAllocatorMethodsPool(t *testing.T) { a := assert.New(t) pool1Called := 0 pool1 := &sync.Pool{ New: func() interface{} { pool1Called++ return nil }, } pool2Called := 0 pool2 := &sync.Pool{ New: func() interface{} { pool2Called++ return nil }, } parent := NewAllocator(unsafe.Pointer(pool1), &AllocatorMethods{ New: func(pool unsafe.Pointer, t reflect.Type) reflect.Value { p := (*sync.Pool)(pool) p.Get() return defaultAllocator.New(t) }, MakeSlice: func(pool unsafe.Pointer, t reflect.Type, len, cap int) reflect.Value { p := (*sync.Pool)(pool) p.Get() return defaultAllocator.MakeSlice(t, len, cap) }, MakeMap: func(pool unsafe.Pointer, t reflect.Type, size int) reflect.Value { p := (*sync.Pool)(pool) p.Get() return defaultAllocator.MakeMap(t, size) }, }) allocator := NewAllocator(unsafe.Pointer(pool2), &AllocatorMethods{ Parent: parent, MakeChan: func(pool unsafe.Pointer, t reflect.Type, size int) reflect.Value { p := (*sync.Pool)(pool) p.Get() return defaultAllocator.MakeChan(t, size) }, }) // All allocation should be implemented by parent. allocator.New(reflect.TypeOf(1)) allocator.MakeSlice(reflect.TypeOf([]int{}), 0, 0) allocator.MakeMap(reflect.TypeOf(map[int]int{}), 0) allocator.MakeChan(reflect.TypeOf(make(chan int)), 0) // 1 for new parent allocator itself. // 1 for new allocator itself. // 3 for New, MakeSlice and MakeMap. a.Equal(pool1Called, 5) // 1 for MakeChan. 
a.Equal(pool2Called, 1) } go-clone-1.6.0/arena.go000066400000000000000000000003721441225667700146560ustar00rootroot00000000000000// Copyright 2023 Huan Du. All rights reserved. // Licensed under the MIT license that can be found in the LICENSE file. //go:build !(go1.20 && goexperiment.arenas) // +build !go1.20 !goexperiment.arenas package clone const arenaIsEnabled = false go-clone-1.6.0/arena_go120.go000066400000000000000000000003641441225667700155670ustar00rootroot00000000000000// Copyright 2023 Huan Du. All rights reserved. // Licensed under the MIT license that can be found in the LICENSE file. //go:build go1.20 && goexperiment.arenas // +build go1.20,goexperiment.arenas package clone const arenaIsEnabled = true go-clone-1.6.0/atomic_go119.go000066400000000000000000000042371441225667700157700ustar00rootroot00000000000000// Copyright 2019 Huan Du. All rights reserved. // Licensed under the MIT license that can be found in the LICENSE file. //go:build go1.19 // +build go1.19 package clone import ( "reflect" "sync/atomic" ) func init() { SetCustomFunc(reflect.TypeOf(atomic.Bool{}), func(allocator *Allocator, old, new reflect.Value) { if !old.CanAddr() { return } // Clone value inside atomic.Bool. oldValue := old.Addr().Interface().(*atomic.Bool) newValue := new.Addr().Interface().(*atomic.Bool) v := oldValue.Load() newValue.Store(v) }) SetCustomFunc(reflect.TypeOf(atomic.Int32{}), func(allocator *Allocator, old, new reflect.Value) { if !old.CanAddr() { return } // Clone value inside atomic.Int32. oldValue := old.Addr().Interface().(*atomic.Int32) newValue := new.Addr().Interface().(*atomic.Int32) v := oldValue.Load() newValue.Store(v) }) SetCustomFunc(reflect.TypeOf(atomic.Int64{}), func(allocator *Allocator, old, new reflect.Value) { if !old.CanAddr() { return } // Clone value inside atomic.Int64. 
oldValue := old.Addr().Interface().(*atomic.Int64) newValue := new.Addr().Interface().(*atomic.Int64) v := oldValue.Load() newValue.Store(v) }) SetCustomFunc(reflect.TypeOf(atomic.Uint32{}), func(allocator *Allocator, old, new reflect.Value) { if !old.CanAddr() { return } // Clone value inside atomic.Uint32. oldValue := old.Addr().Interface().(*atomic.Uint32) newValue := new.Addr().Interface().(*atomic.Uint32) v := oldValue.Load() newValue.Store(v) }) SetCustomFunc(reflect.TypeOf(atomic.Uint64{}), func(allocator *Allocator, old, new reflect.Value) { if !old.CanAddr() { return } // Clone value inside atomic.Uint64. oldValue := old.Addr().Interface().(*atomic.Uint64) newValue := new.Addr().Interface().(*atomic.Uint64) v := oldValue.Load() newValue.Store(v) }) SetCustomFunc(reflect.TypeOf(atomic.Uintptr{}), func(allocator *Allocator, old, new reflect.Value) { if !old.CanAddr() { return } // Clone value inside atomic.Uintptr. oldValue := old.Addr().Interface().(*atomic.Uintptr) newValue := new.Addr().Interface().(*atomic.Uintptr) v := oldValue.Load() newValue.Store(v) }) } go-clone-1.6.0/clone.go000066400000000000000000000435011441225667700146710ustar00rootroot00000000000000// Copyright 2019 Huan Du. All rights reserved. // Licensed under the MIT license that can be found in the LICENSE file. // Package clone provides functions to deep clone any Go data. // It also provides a wrapper to protect a pointer from any unexpected mutation. package clone import ( "fmt" "reflect" "unsafe" ) var heapCloneState = &cloneState{ allocator: defaultAllocator, } var cloner = MakeCloner(defaultAllocator) // Clone recursively deep clone v to a new value in heap. // It assumes that there is no pointer cycle in v, // e.g. v has a pointer points to v itself. // If there is a pointer cycle, use Slowly instead. // // Clone allocates memory and deeply copies values inside v in depth-first sequence. // There are a few special rules for following types. 
// // - Scalar types: all number-like types are copied by value. // - func: Copied by value as func is an opaque pointer at runtime. // - string: Copied by value as string is immutable by design. // - unsafe.Pointer: Copied by value as we don't know what's in it. // - chan: A new empty chan is created as we cannot read data inside the old chan. // // Unlike many other packages, Clone is able to clone unexported fields of any struct. // Use this feature wisely. func Clone(v interface{}) interface{} { return cloner.Clone(v) } func clone(allocator *Allocator, v interface{}) interface{} { if v == nil { return nil } val := reflect.ValueOf(v) cloned := allocator.clone(val, false) return cloned.Interface() } // Slowly recursively deep clone v to a new value in heap. // It marks all cloned values internally, thus it can clone v with cycle pointer. // // Slowly works exactly the same as Clone. See Clone doc for more details. func Slowly(v interface{}) interface{} { return cloner.CloneSlowly(v) } func cloneSlowly(allocator *Allocator, v interface{}) interface{} { if v == nil { return nil } val := reflect.ValueOf(v) cloned := allocator.cloneSlowly(val, false) return cloned.Interface() } type cloneState struct { allocator *Allocator visited visitMap invalid invalidPointers // The value that should not be cloned by custom func. // It's useful to avoid infinite loop when custom func calls allocator.Clone(). 
skipCustomFuncValue reflect.Value } type visit struct { p uintptr extra int t reflect.Type } type visitMap map[visit]reflect.Value type invalidPointers map[visit]reflect.Value func (state *cloneState) clone(v reflect.Value) reflect.Value { if state.allocator.isScalar(v.Kind()) { return copyScalarValue(v) } switch v.Kind() { case reflect.Array: return state.cloneArray(v) case reflect.Chan: return state.allocator.MakeChan(v.Type(), v.Cap()) case reflect.Interface: return state.cloneInterface(v) case reflect.Map: return state.cloneMap(v) case reflect.Ptr: return state.clonePtr(v) case reflect.Slice: return state.cloneSlice(v) case reflect.Struct: return state.cloneStruct(v) case reflect.String: return state.cloneString(v) default: panic(fmt.Errorf("go-clone: unsupported type `%v`", v.Type())) } } func (state *cloneState) cloneArray(v reflect.Value) reflect.Value { dst := state.allocator.New(v.Type()) state.copyArray(v, dst) return dst.Elem() } func (state *cloneState) copyArray(src, nv reflect.Value) { p := unsafe.Pointer(nv.Pointer()) // dst must be a Ptr. 
dst := nv.Elem() num := src.Len() if state.allocator.isScalar(src.Type().Elem().Kind()) { shadowCopy(src, p) return } for i := 0; i < num; i++ { dst.Index(i).Set(state.clone(src.Index(i))) } } func (state *cloneState) cloneInterface(v reflect.Value) reflect.Value { if v.IsNil() { return reflect.Zero(v.Type()) } t := v.Type() elem := v.Elem() return state.clone(elem).Convert(elem.Type()).Convert(t) } func (state *cloneState) cloneMap(v reflect.Value) reflect.Value { if v.IsNil() { return reflect.Zero(v.Type()) } t := v.Type() if state.visited != nil { vst := visit{ p: v.Pointer(), t: t, } if val, ok := state.visited[vst]; ok { return val } } nv := state.allocator.MakeMap(t, v.Len()) if state.visited != nil { vst := visit{ p: v.Pointer(), t: t, } state.visited[vst] = nv } for iter := mapIter(v); iter.Next(); { key := state.clone(iter.Key()) value := state.clone(iter.Value()) nv.SetMapIndex(key, value) } return nv } func (state *cloneState) clonePtr(v reflect.Value) reflect.Value { if v.IsNil() { return reflect.Zero(v.Type()) } t := v.Type() if state.allocator.isOpaquePointer(t) { if v.CanInterface() { return v } ptr := state.allocator.New(t) p := unsafe.Pointer(ptr.Pointer()) shadowCopy(v, p) return ptr.Elem() } if state.visited != nil { vst := visit{ p: v.Pointer(), t: t, } if val, ok := state.visited[vst]; ok { return val } } src := v.Elem() elemType := src.Type() elemKind := src.Kind() nv := state.allocator.New(elemType) if state.visited != nil { vst := visit{ p: v.Pointer(), t: t, } state.visited[vst] = nv } switch elemKind { case reflect.Struct: state.copyStruct(src, nv) case reflect.Array: state.copyArray(src, nv) default: nv.Elem().Set(state.clone(src)) } // If this pointer is the address of a struct field and it's a cycle pointer, // it may be updated. 
if state.visited != nil { vst := visit{ p: v.Pointer(), t: t, } nv = state.visited[vst] } return nv } func (state *cloneState) cloneSlice(v reflect.Value) reflect.Value { if v.IsNil() { return reflect.Zero(v.Type()) } t := v.Type() num := v.Len() if state.visited != nil { vst := visit{ p: v.Pointer(), extra: num, t: t, } if val, ok := state.visited[vst]; ok { return val } } c := v.Cap() nv := state.allocator.MakeSlice(t, num, c) if state.visited != nil { vst := visit{ p: v.Pointer(), extra: num, t: t, } state.visited[vst] = nv } // For scalar slice, copy underlying values directly. if state.allocator.isScalar(t.Elem().Kind()) { src := unsafe.Pointer(v.Pointer()) dst := unsafe.Pointer(nv.Pointer()) sz := int(t.Elem().Size()) l := num * sz cc := c * sz copy((*[maxByteSize]byte)(dst)[:l:cc], (*[maxByteSize]byte)(src)[:l:cc]) } else { for i := 0; i < num; i++ { nv.Index(i).Set(state.clone(v.Index(i))) } } return nv } func (state *cloneState) cloneStruct(v reflect.Value) reflect.Value { t := v.Type() nv := state.allocator.New(t) state.copyStruct(v, nv) return nv.Elem() } var typeOfByteSlice = reflect.TypeOf([]byte(nil)) func (state *cloneState) cloneString(v reflect.Value) reflect.Value { t := v.Type() l := v.Len() data := state.allocator.MakeSlice(typeOfByteSlice, l, l) // The v is an unexported struct field. 
if !v.CanInterface() { v = reflect.ValueOf(v.String()) } reflect.Copy(data, v) nv := state.allocator.New(t) slice := data.Interface().([]byte) *(*stringHeader)(unsafe.Pointer(nv.Pointer())) = *(*stringHeader)(unsafe.Pointer(&slice)) return nv.Elem() } func (state *cloneState) copyStruct(src, nv reflect.Value) { t := src.Type() st := state.allocator.loadStructType(t) ptr := unsafe.Pointer(nv.Pointer()) if st.Init(state.allocator, src, nv, state.skipCustomFuncValue == src) { return } for _, pf := range st.PointerFields { i := int(pf.Index) p := unsafe.Pointer(uintptr(ptr) + pf.Offset) field := src.Field(i) // This field can be referenced by a pointer or interface inside itself. // Put the pointer to this field to visited to avoid any error. // // See https://github.com/huandu/go-clone/issues/3. if state.visited != nil && field.CanAddr() { ft := field.Type() fp := field.Addr().Pointer() vst := visit{ p: fp, t: reflect.PtrTo(ft), } nv := reflect.NewAt(ft, p) // The address of this field was visited, so fp must be a cycle pointer. // As this field is not fully cloned, the val stored in visited[visit] must be wrong. // It must be replaced by nv which will be the right value (it's incomplete right now). // // Unfortunately, if the val was used by previous clone routines, // there is no easy way to fix wrong values - all pointers must be traversed and fixed. 
if val, ok := state.visited[vst]; ok { state.invalid[visit{ p: val.Pointer(), t: vst.t, }] = nv } state.visited[vst] = nv } v := state.clone(field) shadowCopy(v, p) } } var typeOfString = reflect.TypeOf("") func shadowCopy(src reflect.Value, p unsafe.Pointer) { switch src.Kind() { case reflect.Bool: *(*bool)(p) = src.Bool() case reflect.Int: *(*int)(p) = int(src.Int()) case reflect.Int8: *(*int8)(p) = int8(src.Int()) case reflect.Int16: *(*int16)(p) = int16(src.Int()) case reflect.Int32: *(*int32)(p) = int32(src.Int()) case reflect.Int64: *(*int64)(p) = src.Int() case reflect.Uint: *(*uint)(p) = uint(src.Uint()) case reflect.Uint8: *(*uint8)(p) = uint8(src.Uint()) case reflect.Uint16: *(*uint16)(p) = uint16(src.Uint()) case reflect.Uint32: *(*uint32)(p) = uint32(src.Uint()) case reflect.Uint64: *(*uint64)(p) = src.Uint() case reflect.Uintptr: *(*uintptr)(p) = uintptr(src.Uint()) case reflect.Float32: *(*float32)(p) = float32(src.Float()) case reflect.Float64: *(*float64)(p) = src.Float() case reflect.Complex64: *(*complex64)(p) = complex64(src.Complex()) case reflect.Complex128: *(*complex128)(p) = src.Complex() case reflect.Array: t := src.Type() if src.CanAddr() { srcPtr := unsafe.Pointer(src.UnsafeAddr()) sz := t.Size() copy((*[maxByteSize]byte)(p)[:sz:sz], (*[maxByteSize]byte)(srcPtr)[:sz:sz]) return } val := reflect.NewAt(t, p).Elem() if src.CanInterface() { val.Set(src) return } sz := t.Elem().Size() num := src.Len() for i := 0; i < num; i++ { elemPtr := unsafe.Pointer(uintptr(p) + uintptr(i)*sz) shadowCopy(src.Index(i), elemPtr) } case reflect.Chan: *((*uintptr)(p)) = src.Pointer() case reflect.Func: t := src.Type() src = copyScalarValue(src) val := reflect.NewAt(t, p).Elem() val.Set(src) case reflect.Interface: *((*interfaceData)(p)) = parseReflectValue(src) case reflect.Map: *((*uintptr)(p)) = src.Pointer() case reflect.Ptr: *((*uintptr)(p)) = src.Pointer() case reflect.Slice: *(*sliceHeader)(p) = sliceHeader{ Data: src.Pointer(), Len: src.Len(), Cap: 
src.Cap(), } case reflect.String: s := src.String() val := reflect.NewAt(typeOfString, p).Elem() val.SetString(s) case reflect.Struct: t := src.Type() val := reflect.NewAt(t, p).Elem() if src.CanInterface() { val.Set(src) return } num := t.NumField() for i := 0; i < num; i++ { field := t.Field(i) fieldPtr := unsafe.Pointer(uintptr(p) + field.Offset) shadowCopy(src.Field(i), fieldPtr) } case reflect.UnsafePointer: // There is no way to copy unsafe.Pointer value. *((*uintptr)(p)) = src.Pointer() default: panic(fmt.Errorf("go-clone: impossible type `%v` when cloning private field", src.Type())) } } // fix tranverses v to update all pointer values in state.invalid. func (state *cloneState) fix(v reflect.Value) { if state == nil || len(state.invalid) == 0 { return } fix := &fixState{ allocator: state.allocator, fixed: fixMap{}, invalid: state.invalid, } fix.fix(v) } type fixState struct { allocator *Allocator fixed fixMap invalid invalidPointers } type fixMap map[visit]struct{} func (fix *fixState) new(t reflect.Type) reflect.Value { return fix.allocator.New(t) } func (fix *fixState) fix(v reflect.Value) (copied reflect.Value, changed int) { if fix.allocator.isScalar(v.Kind()) { return } switch v.Kind() { case reflect.Array: return fix.fixArray(v) case reflect.Chan: // Do nothing. return case reflect.Interface: return fix.fixInterface(v) case reflect.Map: return fix.fixMap(v) case reflect.Ptr: return fix.fixPtr(v) case reflect.Slice: return fix.fixSlice(v) case reflect.Struct: return fix.fixStruct(v) case reflect.String: // Do nothing. 
return default: panic(fmt.Errorf("go-clone: unsupported type `%v`", v.Type())) } } func (fix *fixState) fixArray(v reflect.Value) (copied reflect.Value, changed int) { t := v.Type() et := t.Elem() kind := et.Kind() if fix.allocator.isScalar(kind) { return } l := v.Len() for i := 0; i < l; i++ { elem := v.Index(i) if kind == reflect.Ptr { vst := visit{ p: elem.Pointer(), t: et, } if nv, ok := fix.invalid[vst]; ok { // If elem cannot be set, v must be copied to make it settable. // Don't do it unless there is no other choices. if !elem.CanSet() { copied = fix.new(t).Elem() shadowCopy(v, unsafe.Pointer(copied.Addr().Pointer())) _, changed = fix.fixArray(copied) return } elem.Set(nv) changed++ continue } } fixed, c := fix.fix(elem) changed += c if fixed.IsValid() { // If elem cannot be set, v must be copied to make it settable. // Don't do it unless there is no other choices. if !elem.CanSet() { copied = fix.new(t).Elem() shadowCopy(v, unsafe.Pointer(copied.Addr().Pointer())) _, changed = fix.fixArray(copied) return } elem.Set(fixed) } } return } func (fix *fixState) fixInterface(v reflect.Value) (copied reflect.Value, changed int) { if v.IsNil() { return } elem := v.Elem() t := elem.Type() kind := elem.Kind() if kind == reflect.Ptr { vst := visit{ p: elem.Pointer(), t: t, } if nv, ok := fix.invalid[vst]; ok { copied = nv.Convert(v.Type()) changed++ return } } copied, changed = fix.fix(elem) if copied.IsValid() { copied = copied.Convert(v.Type()) } return } func (fix *fixState) fixMap(v reflect.Value) (copied reflect.Value, changed int) { if v.IsNil() { return } t := v.Type() vst := visit{ p: v.Pointer(), t: t, } if _, ok := fix.fixed[vst]; ok { return } fix.fixed[vst] = struct{}{} kt := t.Key() et := t.Elem() keyKind := kt.Kind() elemKind := et.Kind() if isScalar := fix.allocator.isScalar; isScalar(keyKind) && isScalar(elemKind) { return } invalidKeys := map[reflect.Value][2]reflect.Value{} for iter := mapIter(v); iter.Next(); { key := iter.Key() elem := iter.Value() 
var fixed reflect.Value c := 0 if elemKind == reflect.Ptr { vst := visit{ p: elem.Pointer(), t: et, } if nv, ok := fix.invalid[vst]; ok { fixed = nv c++ } else { fixed, c = fix.fixPtr(elem) } } else { fixed, c = fix.fix(elem) } changed += c c = 0 if fixed.IsValid() { v = forceSetMapIndex(v, key, fixed) elem = fixed fixed = reflect.Value{} } if keyKind == reflect.Ptr { vst := visit{ p: key.Pointer(), t: kt, } if nv, ok := fix.invalid[vst]; ok { fixed = nv c++ } else { fixed, c = fix.fixPtr(key) } } else { fixed, c = fix.fix(key) } changed += c // Key cannot be changed immediately inside map range iteration. // Do it later. if fixed.IsValid() { invalidKeys[key] = [2]reflect.Value{fixed, elem} } } for key, kv := range invalidKeys { v = forceSetMapIndex(v, key, reflect.Value{}) v = forceSetMapIndex(v, kv[0], kv[1]) } return } func forceSetMapIndex(v, key, elem reflect.Value) (nv reflect.Value) { nv = v if !v.CanInterface() { nv = forceClearROFlag(v) } if !key.CanInterface() { key = forceClearROFlag(key) } if elem.IsValid() && !elem.CanInterface() { elem = forceClearROFlag(elem) } nv.SetMapIndex(key, elem) return } func (fix *fixState) fixPtr(v reflect.Value) (copied reflect.Value, changed int) { if v.IsNil() { return } vst := visit{ p: v.Pointer(), t: v.Type(), } if _, ok := fix.invalid[vst]; ok { panic(fmt.Errorf("go-clone: invalid pointers must have been fixed in other methods")) } if _, ok := fix.fixed[vst]; ok { return } fix.fixed[vst] = struct{}{} elem := v.Elem() _, changed = fix.fix(elem) return } func (fix *fixState) fixSlice(v reflect.Value) (copied reflect.Value, changed int) { if v.IsNil() { return } t := v.Type() et := t.Elem() kind := et.Kind() if fix.allocator.isScalar(kind) { return } l := v.Len() p := unsafe.Pointer(v.Pointer()) vst := visit{ p: uintptr(p), extra: l, t: t, } if _, ok := fix.fixed[vst]; ok { return } fix.fixed[vst] = struct{}{} for i := 0; i < l; i++ { elem := v.Index(i) var fixed reflect.Value c := 0 if kind == reflect.Ptr { vst := 
visit{ p: elem.Pointer(), t: et, } if nv, ok := fix.invalid[vst]; ok { fixed = nv } else { fixed, c = fix.fixPtr(elem) } } else { fixed, c = fix.fix(elem) } changed += c if fixed.IsValid() { sz := et.Size() elemPtr := unsafe.Pointer(uintptr(p) + sz*uintptr(i)) shadowCopy(fixed, elemPtr) } } return } func (fix *fixState) fixStruct(v reflect.Value) (copied reflect.Value, changed int) { t := v.Type() st := fix.allocator.loadStructType(t) if len(st.PointerFields) == 0 { return } for _, pf := range st.PointerFields { i := int(pf.Index) field := v.Field(i) ft := field.Type() if ft.Kind() == reflect.Ptr { vst := visit{ p: field.Pointer(), t: ft, } if nv, ok := fix.invalid[vst]; ok { // If v is not addressable, a new struct must be allocated. // Don't do it unless there is no other choices. if !v.CanAddr() { copied = fix.new(t).Elem() shadowCopy(v, unsafe.Pointer(copied.Addr().Pointer())) _, changed = fix.fixStruct(copied) return } ptr := unsafe.Pointer(v.Addr().Pointer()) p := unsafe.Pointer(uintptr(ptr) + pf.Offset) shadowCopy(nv, p) continue } } fixed, c := fix.fix(field) changed += c if fixed.IsValid() { // If v is not addressable, a new struct must be allocated. // Don't do it unless there is no other choices. if !v.CanAddr() { copied = fix.new(t).Elem() shadowCopy(v, unsafe.Pointer(copied.Addr().Pointer())) _, changed = fix.fixStruct(copied) return } ptr := unsafe.Pointer(v.Addr().Pointer()) p := unsafe.Pointer(uintptr(ptr) + pf.Offset) shadowCopy(fixed, p) } } return } go-clone-1.6.0/clone_benchmark_test.go000066400000000000000000000011401441225667700177330ustar00rootroot00000000000000// Copyright 2019 Huan Du. All rights reserved. // Licensed under the MIT license that can be found in the LICENSE file. 
package clone import "testing" func BenchmarkSimpleClone(b *testing.B) { orig := &testSimple{ Foo: 123, Bar: "abcd", } b.ResetTimer() for i := 0; i < b.N; i++ { Clone(orig) } } func BenchmarkComplexClone(b *testing.B) { m := map[string]*T{ "abc": { Foo: 123, Bar: map[string]interface{}{ "abc": 321, }, }, "def": { Foo: 456, Bar: map[string]interface{}{ "def": 789, }, }, } b.ResetTimer() for i := 0; i < b.N; i++ { Clone(m) } } go-clone-1.6.0/clone_common_test.go000066400000000000000000000372411441225667700173040ustar00rootroot00000000000000// Copyright 2019 Huan Du. All rights reserved. // Licensed under the MIT license that can be found in the LICENSE file. package clone import ( "bytes" "container/list" "io" "reflect" "testing" "unsafe" "github.com/huandu/go-assert" ) var testFuncMap = map[string]func(t *testing.T, allocator *Allocator){ "Basic Clone": testClone, "Slowly linked list": testSlowlyLinkedList, "Slowly cycle linked list": testSlowlyCycleLinkedList, "Slowly fix invalid cycle pointers": testSlowlyFixInvalidCyclePointers, "Slowly fix invalid linked pointers": testSlowlyFixInvalidLinkedPointers, "Clone array": testCloneArray, "Clone map": testCloneMap, "Clone bytes buffer": testCloneBytesBuffer, "Clone unexported fields": testCloneUnexportedFields, "Clone unexported struct method": testCloneUnexportedStructMethod, "Clone reflect type": testCloneReflectType, } type T struct { Foo int Bar map[string]interface{} } func testClone(t *testing.T, allocator *Allocator) { arr := [4]string{"abc", "def", "ghi"} ch := make(chan int, 2) fn := func(int) {} var it io.Writer = &bytes.Buffer{} m := map[interface{}]string{ "abc": "efg", 123: "ghi", } slice := []string{"xyz", "opq"} st := T{ Foo: 1234, Bar: map[string]interface{}{ "abc": 123, "def": "ghi", }, } ptr := &st complex := []map[string][]*T{ { "abc": { {Foo: 456, Bar: map[string]interface{}{"abc": "def"}}, }, }, { "def": { {Foo: 987, Bar: map[string]interface{}{"abc": "def"}}, {Foo: 321, Bar: 
map[string]interface{}{"ghi": "xyz"}}, }, "ghi": { {Foo: 654, Bar: map[string]interface{}{"def": "abc"}}, }, }, } nested := func() interface{} { var nested []map[string][]*T var nestedPtr *T var nestedIf interface{} var nestedMap map[string]interface{} nested = []map[string][]*T{ { "abc": { {Foo: 987, Bar: map[string]interface{}{"def": nil, "nil": nil}}, {Foo: 321, Bar: map[string]interface{}{"ghi": nil, "def": nil, "cba": nil}}, {Foo: 456}, nil, }, }, } nestedPtr = &T{ Foo: 654, Bar: map[string]interface{}{ "xyz": nested, "opq": nil, }, } nestedIf = map[string]interface{}{ "rst": nested, } nestedMap = map[string]interface{}{} // Don't test it due to bug in Go. // https://github.com/golang/go/issues/33907 //nestedMap["opq"] = nestedMap nested[0]["abc"][0].Bar["def"] = nested nested[0]["abc"][1].Bar["ghi"] = nestedPtr nested[0]["abc"][1].Bar["def"] = nestedIf nested[0]["abc"][1].Bar["cba"] = nested nested[0]["abc"][2].Bar = nestedMap nested[0]["abc"][3] = nestedPtr nestedPtr.Bar["opq"] = nestedPtr return nested }() var nilSlice []int var nilChan chan bool var nilPtr *float64 cases := []interface{}{ 123, "abc", nil, true, testing.TB(nil), arr, ch, fn, it, m, ptr, slice, st, nested, complex, nilSlice, nilChan, nilPtr, } for _, c := range cases { var v1, v2 interface{} if reflect.DeepEqual(c, nested) { // Clone doesn't work on nested data. v1 = c } else { v1 = clone(allocator, c) } v2 = cloneSlowly(allocator, c) deepEqual(t, c, v1) deepEqual(t, c, v2) } } func deepEqual(t *testing.T, expected, actual interface{}) { a := assert.New(t) a.Use(&expected, &actual) val := reflect.ValueOf(actual) // It's not possible to compare chan value. if val.Kind() == reflect.Chan { cval := reflect.ValueOf(expected) a.Equal(cval.Type(), val.Type()) a.Equal(cval.Cap(), val.Cap()) return } if val.Kind() == reflect.Func { // It's not possible to compare func value either. 
cval := reflect.ValueOf(expected) a.Assert(cval.Type() == val.Type()) return } a.Equal(actual, expected) } func testSlowlyLinkedList(t *testing.T, allocator *Allocator) { a := assert.New(t) l := list.New() l.PushBack("v1") l.PushBack("v2") cloned := cloneSlowly(allocator, l).(*list.List) a.Equal(l.Len(), cloned.Len()) a.Equal(l.Front().Value, cloned.Front().Value) a.Equal(l.Back().Value, cloned.Back().Value) // There must be only two elements in cloned. a.Equal(cloned.Back(), cloned.Front().Next()) a.Equal(cloned.Back().Next(), nil) } type cycleLinkedList struct { elems []*list.Element elem *list.Element list *list.List } func testSlowlyCycleLinkedList(t *testing.T, allocator *Allocator) { a := assert.New(t) l := list.New() elem := l.PushBack("123") cycle := &cycleLinkedList{ elems: []*list.Element{elem}, elem: elem, list: l, } cloned := cloneSlowly(allocator, cycle).(*cycleLinkedList) a.Equal(l.Len(), cloned.list.Len()) a.Equal(elem.Value, cloned.list.Front().Value) // There must be only one element in cloned. 
a.Equal(cloned.list.Front(), cloned.list.Back()) a.Equal(cloned.list.Front().Next(), nil) a.Equal(cloned.list.Back().Next(), nil) } type cycleList struct { root cycleElement elem *cycleElement } type cycleElement struct { next *cycleElement list *cycleList } type cycleComplex struct { ch chan bool scalar int scalarArray *[1]int scalarSlice []string scalarStruct *reflect.Value _ []*cycleElement _ map[*cycleElement]*cycleElement _ interface{} array [2]*cycleElement slice []*cycleElement iface1, iface2 interface{} ptr1, ptr2 *cycleElement scalarMap map[string]int plainMap map[int]*cycleElement simpleMap map[*cycleList]*cycleElement complexMap map[*cycleElement]*cycleElement pair cycleElementPair pairValue interface{} refSlice *[]*cycleElement refComplexMap *map[*cycleElement]*cycleElement } type cycleElementPair struct { elem1, elem2 *cycleElement } func makeCycleElement() *cycleElement { list := &cycleList{} elem := &cycleElement{ next: &list.root, list: list, } list.root.next = elem list.root.list = list list.elem = elem return &list.root } func (elem *cycleElement) validateCycle(t *testing.T) { a := assert.New(t) // elem is the &list.root. 
a.Assert(elem == &elem.list.root) a.Assert(elem.next == elem.list.elem) a.Assert(elem.next.next == elem) } func testSlowlyFixInvalidCyclePointers(t *testing.T, allocator *Allocator) { var scalarArray [1]int scalarStruct := reflect.ValueOf(1) value := &cycleComplex{ ch: make(chan bool), scalar: 123, scalarArray: &scalarArray, scalarSlice: []string{"hello"}, scalarStruct: &scalarStruct, array: [2]*cycleElement{makeCycleElement(), makeCycleElement()}, slice: []*cycleElement{makeCycleElement(), makeCycleElement()}, iface1: makeCycleElement(), iface2: makeCycleElement(), ptr1: makeCycleElement(), ptr2: makeCycleElement(), scalarMap: map[string]int{ "foo": 123, }, plainMap: map[int]*cycleElement{ 123: makeCycleElement(), }, simpleMap: map[*cycleList]*cycleElement{ makeCycleElement().list: makeCycleElement(), }, complexMap: map[*cycleElement]*cycleElement{ makeCycleElement(): makeCycleElement(), }, } value.refSlice = &value.slice value.refComplexMap = &value.complexMap cloned := cloneSlowly(allocator, value).(*cycleComplex) cloned.array[0].validateCycle(t) cloned.array[1].validateCycle(t) cloned.slice[0].validateCycle(t) cloned.slice[1].validateCycle(t) cloned.iface1.(*cycleElement).validateCycle(t) cloned.iface2.(*cycleElement).validateCycle(t) cloned.ptr1.validateCycle(t) cloned.ptr2.validateCycle(t) cloned.plainMap[123].validateCycle(t) for k, v := range cloned.simpleMap { k.root.validateCycle(t) k.elem.next.validateCycle(t) v.validateCycle(t) } for k, v := range cloned.complexMap { k.validateCycle(t) v.validateCycle(t) } a := assert.New(t) a.Assert(cloned.refSlice == &cloned.slice) a.Assert(cloned.refComplexMap == &cloned.complexMap) } func makeLinkedElements() (elem1, elem2 *cycleElement) { list := &cycleList{} elem1 = &list.root elem2 = &cycleElement{ next: &list.root, list: list, } list.root.next = &cycleElement{} list.elem = elem2 return } func (elem *cycleElement) validateLinked(t *testing.T) { a := assert.New(t) // elem is the elem2. 
a.Assert(elem == elem.list.elem) a.Assert(elem.next == &elem.list.root) a.Assert(elem.next.next.next == nil) } func testSlowlyFixInvalidLinkedPointers(t *testing.T, allocator *Allocator) { value := &cycleComplex{ array: func() (elems [2]*cycleElement) { elems[0], elems[1] = makeLinkedElements() return }(), slice: func() []*cycleElement { elem1, elem2 := makeLinkedElements() return []*cycleElement{elem1, elem2} }(), scalarMap: map[string]int{ "foo": 123, }, plainMap: func() map[int]*cycleElement { elem1, elem2 := makeLinkedElements() return map[int]*cycleElement{ 1: elem1, 2: elem2, } }(), simpleMap: func() map[*cycleList]*cycleElement { elem1, elem2 := makeLinkedElements() return map[*cycleList]*cycleElement{ elem2.list: elem1, } }(), complexMap: func() map[*cycleElement]*cycleElement { elem1, elem2 := makeLinkedElements() return map[*cycleElement]*cycleElement{ elem1: elem2, } }(), } value.refSlice = &value.slice value.refComplexMap = &value.complexMap value.iface1, value.iface2 = makeLinkedElements() value.ptr1, value.ptr2 = makeLinkedElements() value.pair.elem1, value.pair.elem2 = makeLinkedElements() var pair cycleElementPair pair.elem1, pair.elem2 = makeLinkedElements() value.pairValue = pair cloned := cloneSlowly(allocator, value).(*cycleComplex) cloned.array[1].validateLinked(t) cloned.slice[1].validateLinked(t) cloned.iface2.(*cycleElement).validateLinked(t) cloned.ptr2.validateLinked(t) cloned.plainMap[2].validateLinked(t) for k := range cloned.simpleMap { k.elem.validateLinked(t) } for _, v := range cloned.complexMap { v.validateLinked(t) } value.pair.elem2.validateLinked(t) value.pairValue.(cycleElementPair).elem2.validateLinked(t) a := assert.New(t) a.Assert(cloned.refSlice == &cloned.slice) a.Assert(cloned.refComplexMap == &cloned.complexMap) } func testCloneArray(t *testing.T, allocator *Allocator) { a := assert.New(t) arr := [2]*T{ { Foo: 123, Bar: map[string]interface{}{ "abc": 123, }, }, { Foo: 456, Bar: map[string]interface{}{ "def": 456, "ghi": 
789, }, }, } cloned := clone(allocator, arr).([2]*T) a.Use(&arr, &cloned) a.Equal(arr, cloned) // arr is not changed if cloned is mutated. cloned[0].Foo = 987 cloned[1].Bar["ghi"] = 321 a.Equal(arr[0].Foo, 123) a.Equal(arr[1].Bar["ghi"], 789) } func testCloneMap(t *testing.T, allocator *Allocator) { a := assert.New(t) m := map[string]*T{ "abc": { Foo: 123, Bar: map[string]interface{}{ "abc": 321, }, }, "def": { Foo: 456, Bar: map[string]interface{}{ "def": 789, }, }, } cloned := clone(allocator, m).(map[string]*T) a.Use(&m, &cloned) a.Equal(m, cloned) // m is not changed if cloned is mutated. cloned["abc"].Foo = 321 cloned["def"].Bar["def"] = 987 a.Equal(m["abc"].Foo, 123) a.Equal(m["def"].Bar["def"], 789) } func testCloneBytesBuffer(t *testing.T, allocator *Allocator) { a := assert.New(t) buf := &bytes.Buffer{} buf.WriteString("Hello, world!") dummy := make([]byte, len("Hello, ")) buf.Read(dummy) cloned := clone(allocator, buf).(*bytes.Buffer) a.Use(&buf, &cloned) // Data must be cloned. a.Equal(buf.Len(), cloned.Len()) a.Equal(buf.String(), cloned.String()) // Data must not share the same address. 
from := buf.Bytes() to := cloned.Bytes() a.Assert(&from[0] != &to[0]) buf.WriteString("!!!!!") a.NotEqual(buf.Len(), cloned.Len()) a.NotEqual(buf.String(), cloned.String()) } type Simple struct { Foo int Bar string } type Unexported struct { insider } type insider struct { i int i8 int8 i16 int16 i32 int32 i64 int64 u uint u8 uint8 u16 uint16 u32 uint32 u64 uint64 uptr uintptr b bool s string f32 float32 f64 float64 c64 complex64 c128 complex128 arr [4]string arrPtr *[10]byte ch chan bool fn func(s string) string method func([]byte) (int, error) iface io.Writer ifaceScalar io.Writer _ interface{} m map[string]interface{} ptr *Unexported _ *Unexported slice []*Unexported st Simple unsafePointer unsafe.Pointer t reflect.Type Simple } type scalarWriter int8 func (scalarWriter) Write(p []byte) (n int, err error) { return } func testCloneUnexportedFields(t *testing.T, allocator *Allocator) { a := assert.New(t) unexported := &Unexported{ insider: insider{ i: -1, i8: -8, i16: -16, i32: -32, i64: -64, u: 1, u8: 8, u16: 16, u32: 32, u64: 64, uptr: uintptr(0xDEADC0DE), b: true, s: "hello", f32: 3.2, f64: 6.4, c64: complex(6, 4), c128: complex(12, 8), arr: [4]string{ "a", "b", "c", "d", }, arrPtr: &[10]byte{1, 2, 3, 4, 5, 6, 7, 8, 9, 10}, ch: make(chan bool, 5), fn: func(s string) string { return s + ", world!" }, method: bytes.NewBufferString("method").Write, iface: bytes.NewBufferString("interface"), ifaceScalar: scalarWriter(123), m: map[string]interface{}{ "key": "value", }, unsafePointer: unsafe.Pointer(&Unexported{}), st: Simple{ Foo: 123, Bar: "bar1", }, Simple: Simple{ Foo: 456, Bar: "bar2", }, t: reflect.TypeOf(&Simple{}), }, } unexported.m["loop"] = &unexported.m // Make pointer cycles. unexported.ptr = unexported unexported.slice = []*Unexported{unexported} cloned := cloneSlowly(allocator, unexported).(*Unexported) a.Use(&unexported, &cloned) // unsafe.Pointer is shadow copied. 
a.Assert(cloned.unsafePointer == unexported.unsafePointer) unexported.unsafePointer = nil cloned.unsafePointer = nil // chan cannot be compared, but its buffer can be verified. a.Equal(cap(cloned.ch), cap(unexported.ch)) unexported.ch = nil cloned.ch = nil // fn cannot be compared, but it can be called. a.Equal(cloned.fn("Hello"), unexported.fn("Hello")) unexported.fn = nil cloned.fn = nil // method cannot be compared, but it can be called. a.Assert(cloned.method != nil) a.NilError(cloned.method([]byte("1234"))) unexported.method = nil cloned.method = nil // cloned.m["loop"] must be exactly the same map of cloned.m. a.Assert(reflect.ValueOf(cloned.m["loop"]).Elem().Pointer() == reflect.ValueOf(cloned.m).Pointer()) // Don't test this map in reflect.DeepEqual due to bug in Go. // https://github.com/golang/go/issues/33907 unexported.m["loop"] = nil cloned.m["loop"] = nil // reflect.Type should be copied by value. a.Equal(reflect.ValueOf(cloned.t).Pointer(), reflect.ValueOf(unexported.t).Pointer()) // Finally, everything else should equal. a.Equal(unexported, cloned) } func testCloneUnexportedStructMethod(t *testing.T, allocator *Allocator) { a := assert.New(t) // Another complex case: clone a struct and a map of struct instead of ptr to a struct. st := insider{ m: map[string]interface{}{ "insider": insider{ method: bytes.NewBufferString("method").Write, }, }, } cloned := clone(allocator, st).(insider) a.Use(&st, &cloned) // For a struct copy, there is a tricky way to copy method. Test it. a.Assert(cloned.m["insider"].(insider).method != nil) n, err := cloned.m["insider"].(insider).method([]byte("1234")) a.NilError(err) a.Equal(n, 4) } func testCloneReflectType(t *testing.T, allocator *Allocator) { a := assert.New(t) // reflect.rtype should not be deeply cloned. 
foo := reflect.TypeOf("foo") cloned := clone(allocator, foo).(reflect.Type) a.Use(&foo, &cloned) from := reflect.ValueOf(foo) to := reflect.ValueOf(cloned) a.Assert(from.Pointer() == to.Pointer()) } go-clone-1.6.0/clone_sample_test.go000066400000000000000000000011761441225667700172730ustar00rootroot00000000000000// Copyright 2019 Huan Du. All rights reserved. // Licensed under the MIT license that can be found in the LICENSE file. package clone import "fmt" func ExampleSlowly() { type ListNode struct { Data int Next *ListNode } node1 := &ListNode{ Data: 1, } node2 := &ListNode{ Data: 2, } node3 := &ListNode{ Data: 3, } node1.Next = node2 node2.Next = node3 node3.Next = node1 // We must use `Slowly` to clone a circular linked list. node := Slowly(node1).(*ListNode) for i := 0; i < 10; i++ { fmt.Println(node.Data) node = node.Next } // Output: // 1 // 2 // 3 // 1 // 2 // 3 // 1 // 2 // 3 // 1 } go-clone-1.6.0/clone_test.go000066400000000000000000000004521441225667700157260ustar00rootroot00000000000000// Copyright 2023 Huan Du. All rights reserved. // Licensed under the MIT license that can be found in the LICENSE file. package clone import "testing" func TestCloneAll(t *testing.T) { for name, fn := range testFuncMap { t.Run(name, func(t *testing.T) { fn(t, defaultAllocator) }) } } go-clone-1.6.0/cloner.go000066400000000000000000000012551441225667700150530ustar00rootroot00000000000000// Copyright 2023 Huan Du. All rights reserved. // Licensed under the MIT license that can be found in the LICENSE file. package clone // Cloner implements clone API with given allocator. type Cloner struct { allocator *Allocator } // MakeCloner creates a cloner with given allocator. func MakeCloner(allocator *Allocator) Cloner { return Cloner{ allocator: allocator, } } // Clone clones v with given allocator. func (c Cloner) Clone(v interface{}) interface{} { return clone(c.allocator, v) } // CloneSlowly clones v with given allocator. // It can clone v with cycle pointer. 
func (c Cloner) CloneSlowly(v interface{}) interface{} { return cloneSlowly(c.allocator, v) } go-clone-1.6.0/generic/000077500000000000000000000000001441225667700146535ustar00rootroot00000000000000go-clone-1.6.0/generic/README.md000066400000000000000000000010051441225667700161260ustar00rootroot00000000000000# Generic `go-clone` API [![Go](https://github.com/huandu/go-clone/workflows/Go/badge.svg)](https://github.com/huandu/go-clone/actions) [![Go Doc](https://godoc.org/github.com/huandu/go-clone/generic?status.svg)](https://pkg.go.dev/github.com/huandu/go-clone/generic) This package is a set of generic API for `go-clone`. Almost all methods are simple proxies with a few exceptions. It requires `go1.18` or later to build this package. Please read document in [the main project](../README.md) for more information. go-clone-1.6.0/generic/api.go000066400000000000000000000025311441225667700157540ustar00rootroot00000000000000// Copyright 2022 Huan Du. All rights reserved. // Licensed under the MIT license that can be found in the LICENSE file. // Package clone provides functions to deep clone any Go data. // It also provides a wrapper to protect a pointer from any unexpected mutation. // // This package is only a proxy to original go-clone package with generic support. // To minimize the maintenace cost, there is no doc in this package. // Please read the document in https://pkg.go.dev/github.com/huandu/go-clone instead. 
package clone import ( "reflect" "unsafe" "github.com/huandu/go-clone" ) type Func = clone.Func type Allocator = clone.Allocator type AllocatorMethods = clone.AllocatorMethods func Clone[T any](t T) T { return clone.Clone(t).(T) } func Slowly[T any](t T) T { return clone.Slowly(t).(T) } func Wrap[T any](t T) T { return clone.Wrap(t).(T) } func Unwrap[T any](t T) T { return clone.Unwrap(t).(T) } func Undo[T any](t T) { clone.Undo(t) } func MarkAsOpaquePointer(t reflect.Type) { clone.MarkAsOpaquePointer(t) } func MarkAsScalar(t reflect.Type) { clone.MarkAsScalar(t) } func SetCustomFunc(t reflect.Type, fn Func) { clone.SetCustomFunc(t, fn) } func FromHeap() *Allocator { return clone.FromHeap() } func NewAllocator(pool unsafe.Pointer, methods *AllocatorMethods) (allocator *Allocator) { return clone.NewAllocator(pool, methods) } go-clone-1.6.0/generic/api_test.go000066400000000000000000000017171441225667700170200ustar00rootroot00000000000000// Copyright 2022 Huan Du. All rights reserved. // Licensed under the MIT license that can be found in the LICENSE file. 
package clone import ( "reflect" "testing" "github.com/huandu/go-assert" ) type MyType struct { Foo int bar string } func TestGenericAPI(t *testing.T) { a := assert.New(t) original := &MyType{ Foo: 123, bar: "player", } var v *MyType = Clone(original) a.Equal(v, original) v = Slowly(original) a.Equal(v, original) v = Wrap(original) a.Equal(v, original) a.Assert(Unwrap(v) == original) v.Foo = 777 a.Equal(Unwrap(v).Foo, original.Foo) Undo(v) a.Equal(v, original) } type MyPointer struct { Foo *int P *MyPointer } func TestMarkAsAPI(t *testing.T) { a := assert.New(t) MarkAsScalar(reflect.TypeOf(MyPointer{})) MarkAsOpaquePointer(reflect.TypeOf(&MyPointer{})) n := 0 orignal := MyPointer{ Foo: &n, } orignal.P = &orignal v := Clone(orignal) a.Assert(v.Foo == orignal.Foo) a.Assert(v.P == &orignal) } go-clone-1.6.0/generic/arena.go000066400000000000000000000053741441225667700163010ustar00rootroot00000000000000// Copyright 2023 Huan Du. All rights reserved. // Licensed under the MIT license that can be found in the LICENSE file. //go:build go1.20 && goexperiment.arenas // +build go1.20,goexperiment.arenas package clone import ( "arena" "reflect" "runtime" "unsafe" "github.com/huandu/go-clone" ) // The arenaAllocator allocates memory from arena. var arenaAllocatorMethods = &clone.AllocatorMethods{ New: arenaNew, MakeSlice: arenaMakeSlice, MakeMap: arenaMakeMap, MakeChan: arenaMakeChan, } // FromArena creates an allocator using arena a to allocate memory. func FromArena(a *arena.Arena) *clone.Allocator { return clone.NewAllocator(unsafe.Pointer(a), arenaAllocatorMethods) } // ArenaClone recursively deep clones v to a new value in arena a. // It works in the same way as Clone, except it allocates all memory from arena. 
func ArenaClone[T any](a *arena.Arena, v T) (nv T) { src := reflect.ValueOf(v) cloned := FromArena(a).Clone(src) if !cloned.IsValid() { return } dst := reflect.ValueOf(&nv).Elem() dst.Set(cloned) return } // ArenaCloneSlowly recursively deep clones v to a new value in arena a. // It works in the same way as Slowly, except it allocates all memory from arena. func ArenaCloneSlowly[T any](a *arena.Arena, v T) (nv T) { src := reflect.ValueOf(v) cloned := FromArena(a).CloneSlowly(src) if !cloned.IsValid() { return } dst := reflect.ValueOf(&nv).Elem() dst.Set(cloned) return } func arenaNew(pool unsafe.Pointer, t reflect.Type) reflect.Value { return reflect.ArenaNew((*arena.Arena)(pool), reflect.PtrTo(t)) } // Define the slice header again to mute golint's warning. type sliceHeader reflect.SliceHeader func arenaMakeSlice(pool unsafe.Pointer, t reflect.Type, len, cap int) reflect.Value { a := (*arena.Arena)(pool) // As of go1.20, there is no reflect method to allocate slice in arena. // Following code is a hack to allocate a large enough byte buffer // and then cast it to T[]. et := t.Elem() l := int(et.Size()) total := l * cap data := arena.MakeSlice[byte](a, total, total) ptr := unsafe.Pointer(&data[0]) elem := reflect.NewAt(et, ptr) slicePtr := reflect.ArenaNew(a, reflect.PtrTo(t)) *(*sliceHeader)(slicePtr.UnsafePointer()) = sliceHeader{ Data: elem.Pointer(), Len: l, Cap: cap, } runtime.KeepAlive(elem) slice := slicePtr.Elem() return slice.Slice3(0, len, cap) } func arenaMakeMap(pool unsafe.Pointer, t reflect.Type, n int) reflect.Value { // As of go1.20, there is no way to allocate map in arena. // Fallback to heap allocation. return reflect.MakeMapWithSize(t, n) } func arenaMakeChan(pool unsafe.Pointer, t reflect.Type, buffer int) reflect.Value { // As of go1.20, there is no way to allocate chan in arena. // Fallback to heap allocation. 
return reflect.MakeChan(t, buffer) } go-clone-1.6.0/generic/arena_test.go000066400000000000000000000026021441225667700173270ustar00rootroot00000000000000// Copyright 2023 Huan Du. All rights reserved. // Licensed under the MIT license that can be found in the LICENSE file. //go:build go1.20 && goexperiment.arenas // +build go1.20,goexperiment.arenas package clone import ( "arena" "reflect" "runtime" "testing" "unsafe" "github.com/huandu/go-assert" ) func TestArenaClone(t *testing.T) { a := assert.New(t) type FooInner struct { Value float64 } type Foo struct { A string B []int C *FooInner D map[int]string } foo := &Foo{ A: "hello", B: []int{1, 2, 3}, C: &FooInner{ Value: 45.6, }, D: map[int]string{ 7: "7", }, } ar := arena.NewArena() cloned := ArenaClone(ar, foo) a.Equal(foo, cloned) // If a pointer is not allocated by arena, arena.Clone() will return the pointer as it is. // Use this feature to check whether a pointer is allocated by arena. prevStr := foo.A str := arena.Clone(cloned.A) a.Assert(((*reflect.StringHeader)(unsafe.Pointer(&str))).Data != ((*reflect.StringHeader)(unsafe.Pointer(&prevStr))).Data) a.Assert(arena.Clone(cloned) != foo) slice := arena.Clone(cloned.B) a.Assert(&slice[0] != &foo.B[0]) a.Assert(arena.Clone(cloned.C) != foo.C) prevStr = foo.D[7] str = arena.Clone(cloned.D[7]) a.Assert(((*reflect.StringHeader)(unsafe.Pointer(&str))).Data != ((*reflect.StringHeader)(unsafe.Pointer(&prevStr))).Data) // Make sure ar is alive. 
runtime.KeepAlive(ar) } go-clone-1.6.0/generic/go.mod000066400000000000000000000002761441225667700157660ustar00rootroot00000000000000module github.com/huandu/go-clone/generic go 1.18 require ( github.com/huandu/go-assert v1.1.5 github.com/huandu/go-clone v1.5.1 ) require github.com/davecgh/go-spew v1.1.1 // indirect go-clone-1.6.0/generic/go.sum000066400000000000000000000020441441225667700160060ustar00rootroot00000000000000github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c= github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= github.com/huandu/go-assert v1.1.5 h1:fjemmA7sSfYHJD7CUqs9qTwwfdNAx7/j2/ZlHXzNB3c= github.com/huandu/go-assert v1.1.5/go.mod h1:yOLvuqZwmcHIC5rIzrBhT7D3Q9c3GFnd0JrPVhn/06U= github.com/huandu/go-clone v1.5.1 h1:1wlwYRlHZo4HspdOM0YQ6O7Y7bjtxTrrt+4jnDeejVo= github.com/huandu/go-clone v1.5.1/go.mod h1:ReGivhG6op3GYr+UY3lS6mxjKp7MIGTknuU5TbTVaXE= github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4= github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME= github.com/stretchr/testify v1.4.0/go.mod h1:j7eGeouHqKxXV5pUuKE4zz7dFj8WfuZ+81PSLYec5m4= gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= gopkg.in/yaml.v2 v2.2.2/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= go-clone-1.6.0/generic/register.go000066400000000000000000000014501441225667700170260ustar00rootroot00000000000000// Copyright 2022 Huan Du. All rights reserved. // Licensed under the MIT license that can be found in the LICENSE file. package clone import ( "reflect" "sync/atomic" ) // Record the count of cloning atomic.Pointer[T] for test purpose only. var registerAtomicPointerCalled int32 // RegisterAtomicPointer registers a custom clone function for atomic.Pointer[T]. 
func RegisterAtomicPointer[T any]() { SetCustomFunc(reflect.TypeOf(atomic.Pointer[T]{}), func(allocator *Allocator, old, new reflect.Value) { if !old.CanAddr() { return } // Clone value inside atomic.Pointer[T]. oldValue := old.Addr().Interface().(*atomic.Pointer[T]) newValue := new.Addr().Interface().(*atomic.Pointer[T]) v := oldValue.Load() newValue.Store(v) atomic.AddInt32(®isterAtomicPointerCalled, 1) }) } go-clone-1.6.0/generic/register_test.go000066400000000000000000000014201441225667700200620ustar00rootroot00000000000000// Copyright 2022 Huan Du. All rights reserved. // Licensed under the MIT license that can be found in the LICENSE file. package clone import ( "sync/atomic" "testing" "github.com/huandu/go-assert" ) type RegisteredPayload struct { T string } type UnregisteredPayload struct { T string } type Pointers struct { P1 atomic.Pointer[RegisteredPayload] P2 atomic.Pointer[UnregisteredPayload] } func TestRegisterAtomicPointer(t *testing.T) { a := assert.New(t) s := &Pointers{} stackPointerCannotBeCloned := atomic.Pointer[RegisteredPayload]{} // Register atomic.Pointer[RegisteredPayload] only. 
RegisterAtomicPointer[RegisteredPayload]() prev := registerAtomicPointerCalled Clone(s) Clone(stackPointerCannotBeCloned) a.Equal(registerAtomicPointerCalled, prev+1) } go-clone-1.6.0/go.mod000066400000000000000000000001271441225667700143450ustar00rootroot00000000000000module github.com/huandu/go-clone go 1.13 require github.com/huandu/go-assert v1.1.5 go-clone-1.6.0/go.sum000066400000000000000000000015711441225667700143760ustar00rootroot00000000000000github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c= github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= github.com/huandu/go-assert v1.1.5 h1:fjemmA7sSfYHJD7CUqs9qTwwfdNAx7/j2/ZlHXzNB3c= github.com/huandu/go-assert v1.1.5/go.mod h1:yOLvuqZwmcHIC5rIzrBhT7D3Q9c3GFnd0JrPVhn/06U= github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4= github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME= github.com/stretchr/testify v1.4.0/go.mod h1:j7eGeouHqKxXV5pUuKE4zz7dFj8WfuZ+81PSLYec5m4= gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= gopkg.in/yaml.v2 v2.2.2/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= go-clone-1.6.0/headers.go000066400000000000000000000005521441225667700152030ustar00rootroot00000000000000// Copyright 2019 Huan Du. All rights reserved. // Licensed under the MIT license that can be found in the LICENSE file. package clone import "reflect" // As golint reports warning on possible misuse of these headers, // avoid to use these header types directly to silience golint. 
type sliceHeader reflect.SliceHeader type stringHeader reflect.StringHeader go-clone-1.6.0/interfacedata.go000066400000000000000000000023731441225667700163650ustar00rootroot00000000000000package clone import ( "reflect" "unsafe" ) const sizeOfPointers = unsafe.Sizeof((interface{})(0)) / unsafe.Sizeof(uintptr(0)) // interfaceData is the underlying data of an interface. // As the reflect.Value's interfaceData method is deprecated, // it may be broken in any Go release. // It's better to create a custom to hold the data. // // The type of interfaceData fields must be poniters. // It's a way to cheat Go compile to generate calls to write barrier // when copying interfaces. type interfaceData struct { _ [sizeOfPointers]unsafe.Pointer } var reflectValuePtrOffset uintptr func init() { t := reflect.TypeOf(reflect.Value{}) found := false fields := t.NumField() for i := 0; i < fields; i++ { field := t.Field(i) if field.Type.Kind() == reflect.UnsafePointer { found = true reflectValuePtrOffset = field.Offset break } } if !found { panic("go-clone: fail to find internal ptr field in reflect.Value") } } // parseReflectValue returns the underlying interface data in a reflect value. // It assumes that v is an interface value. func parseReflectValue(v reflect.Value) interfaceData { pv := (unsafe.Pointer)(uintptr(unsafe.Pointer(&v)) + reflectValuePtrOffset) ptr := *(*unsafe.Pointer)(pv) return *(*interfaceData)(ptr) } go-clone-1.6.0/mapiter.go000066400000000000000000000011411441225667700152240ustar00rootroot00000000000000// Copyright 2019 Huan Du. All rights reserved. // Licensed under the MIT license that can be found in the LICENSE file. 
// +build !go1.12 package clone import ( "reflect" ) type iter struct { m reflect.Value k reflect.Value keys []reflect.Value } func mapIter(m reflect.Value) *iter { return &iter{ m: m, keys: m.MapKeys(), } } func (it *iter) Next() bool { if len(it.keys) == 0 { return false } it.k = it.keys[0] it.keys = it.keys[1:] return true } func (it *iter) Key() reflect.Value { return it.k } func (it *iter) Value() reflect.Value { return it.m.MapIndex(it.k) } go-clone-1.6.0/mapiter_go112.go000066400000000000000000000003721441225667700161420ustar00rootroot00000000000000// Copyright 2019 Huan Du. All rights reserved. // Licensed under the MIT license that can be found in the LICENSE file. // +build go1.12 package clone import ( "reflect" ) func mapIter(m reflect.Value) *reflect.MapIter { return m.MapRange() } go-clone-1.6.0/memory.go000066400000000000000000000006101441225667700150730ustar00rootroot00000000000000// Copyright 2019 Huan Du. All rights reserved. // Licensed under the MIT license that can be found in the LICENSE file. package clone // maxByteSize is a large enough value to cheat Go compiler // when converting unsafe address to []byte. // It's not actually used in runtime. // // The value 2^30 is the max value AFAIK to make Go compiler happy on all archs. const maxByteSize = 1 << 30 go-clone-1.6.0/structtype.go000066400000000000000000000217061441225667700160220ustar00rootroot00000000000000// Copyright 2019 Huan Du. All rights reserved. // Licensed under the MIT license that can be found in the LICENSE file. package clone import ( "crypto/elliptic" "fmt" "reflect" "sync" "sync/atomic" "time" "unsafe" ) type structType struct { PointerFields []structFieldType fn Func } type structFieldType struct { Offset uintptr // The offset from the beginning of the struct. Index int // The index of the field. } var zeroStructType = structType{} func init() { // Some well-known scalar-like structs. 
MarkAsScalar(reflect.TypeOf(time.Time{})) MarkAsScalar(reflect.TypeOf(reflect.Value{})) // Special case for elliptic.Curve which is used by TLS ECC certificate. // Package crypto/tls uses elliptic.Curve as enum values // so that they should be treated as opaque pointers. // // As elliptic.Curve is an interface, it can be *elliptic.CurveParam or elliptic.p256Curve. MarkAsOpaquePointer(reflect.TypeOf(&elliptic.CurveParams{})) curves := []elliptic.Curve{ elliptic.P224(), elliptic.P256(), elliptic.P384(), elliptic.P521(), } for _, curve := range curves { MarkAsOpaquePointer(reflect.ValueOf(curve).Type()) } // Special case for reflect.Type (actually *reflect.rtype): // The *reflect.rtype should not be copied as it is immutable and // may point to a variable that actual type is not reflect.rtype, // e.g. *reflect.arrayType or *reflect.chanType. MarkAsOpaquePointer(reflect.TypeOf(reflect.TypeOf(0))) // Some well-known no-copy structs. // // Almost all structs defined in package "sync" and "sync/atomic" are set // except `sync.Once` which can be safely cloned with a correct done value. SetCustomFunc(reflect.TypeOf(sync.Mutex{}), emptyCloneFunc) SetCustomFunc(reflect.TypeOf(sync.RWMutex{}), emptyCloneFunc) SetCustomFunc(reflect.TypeOf(sync.WaitGroup{}), emptyCloneFunc) SetCustomFunc(reflect.TypeOf(sync.Cond{}), func(allocator *Allocator, old, new reflect.Value) { // Copy the New func from old value. oldL := old.FieldByName("L") newL := allocator.Clone(oldL) new.FieldByName("L").Set(newL) }) SetCustomFunc(reflect.TypeOf(sync.Pool{}), func(allocator *Allocator, old, new reflect.Value) { // Copy the New func from old value. oldFn := old.FieldByName("New") newFn := allocator.Clone(oldFn) new.FieldByName("New").Set(newFn) }) SetCustomFunc(reflect.TypeOf(sync.Map{}), func(allocator *Allocator, old, new reflect.Value) { if !old.CanAddr() { return } // Clone all values inside sync.Map. 
oldMap := old.Addr().Interface().(*sync.Map) newMap := new.Addr().Interface().(*sync.Map) oldMap.Range(func(key, value interface{}) bool { k := clone(allocator, key) v := clone(allocator, value) newMap.Store(k, v) return true }) }) SetCustomFunc(reflect.TypeOf(atomic.Value{}), func(allocator *Allocator, old, new reflect.Value) { if !old.CanAddr() { return } // Clone value inside atomic.Value. oldValue := old.Addr().Interface().(*atomic.Value) newValue := new.Addr().Interface().(*atomic.Value) v := oldValue.Load() cloned := clone(allocator, v) newValue.Store(cloned) }) } // MarkAsScalar marks t as a scalar type in heap allocator, // so that all clone methods will copy t by value. // If t is not struct or pointer to struct, MarkAsScalar ignores t. // // In the most cases, it's not necessary to call it explicitly. // If a struct type contains scalar type fields only, the struct will be marked as scalar automatically. // // Here is a list of types marked as scalar by default: // - time.Time // - reflect.Value func MarkAsScalar(t reflect.Type) { defaultAllocator.MarkAsScalar(t) } // MarkAsOpaquePointer marks t as an opaque pointer in heap allocator, // so that all clone methods will copy t by value. // If t is not a pointer, MarkAsOpaquePointer ignores t. // // Here is a list of types marked as opaque pointers by default: // - `elliptic.Curve`, which is `*elliptic.CurveParam` or `elliptic.p256Curve`; // - `reflect.Type`, which is `*reflect.rtype` defined in `runtime`. func MarkAsOpaquePointer(t reflect.Type) { defaultAllocator.MarkAsOpaquePointer(t) } // Func is a custom func to clone value from old to new. // The new is a zero value // which `new.CanSet()` and `new.CanAddr()` is guaranteed to be true. // // Func must update the new to return result. type Func func(allocator *Allocator, old, new reflect.Value) // emptyCloneFunc is used to disable shadow copy. // It's useful when cloning sync.Mutex as cloned value must be a zero value. 
func emptyCloneFunc(allocator *Allocator, old, new reflect.Value) {} // SetCustomFunc sets a custom clone function for type t in heap allocator. // If t is not struct or pointer to struct, SetCustomFunc ignores t. // // If fn is nil, remove the custom clone function for type t. func SetCustomFunc(t reflect.Type, fn Func) { defaultAllocator.SetCustomFunc(t, fn) } // Init creates a new value of src.Type() and shadow copies all content from src. // If noCustomFunc is set to true, custom clone function will be ignored. // // Init returns true if the value is cloned by a custom func. // Caller should skip cloning struct fields in depth. func (st *structType) Init(allocator *Allocator, src, nv reflect.Value, noCustomFunc bool) (done bool) { dst := nv.Elem() if !noCustomFunc && st.fn != nil { if !src.CanInterface() { src = forceClearROFlag(src) } st.fn(allocator, src, dst) done = true return } ptr := unsafe.Pointer(nv.Pointer()) shadowCopy(src, ptr) done = len(st.PointerFields) == 0 return } func (st *structType) CanShadowCopy() bool { return len(st.PointerFields) == 0 && st.fn == nil } // IsScalar returns true if k should be considered as a scalar type. // // For the sake of performance, string is considered as a scalar type unless arena is enabled. // If we need to deep copy string value in some cases, we can create a new allocator with custom isScalar function // in which we can return false when k is reflect.String. // // // Create a new allocator which treats string as non-scalar type. 
// allocator := NewAllocator(nil, &AllocatorMethods{ // IsScalar: func(k reflect.Kind) bool { // return k != reflect.String && IsScalar(k) // }, // }) func IsScalar(k reflect.Kind) bool { switch k { case reflect.Bool, reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64, reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64, reflect.Uintptr, reflect.Float32, reflect.Float64, reflect.Complex64, reflect.Complex128, reflect.Func, reflect.UnsafePointer, reflect.Invalid: return true case reflect.String: // If arena is not enabled, string can be copied as scalar safely // as it's immutable by design. return !arenaIsEnabled } return false } func copyScalarValue(src reflect.Value) reflect.Value { if src.CanInterface() { return src } // src is an unexported field value. Copy its value. switch src.Kind() { case reflect.Bool: return reflect.ValueOf(src.Bool()) case reflect.Int: return reflect.ValueOf(int(src.Int())) case reflect.Int8: return reflect.ValueOf(int8(src.Int())) case reflect.Int16: return reflect.ValueOf(int16(src.Int())) case reflect.Int32: return reflect.ValueOf(int32(src.Int())) case reflect.Int64: return reflect.ValueOf(src.Int()) case reflect.Uint: return reflect.ValueOf(uint(src.Uint())) case reflect.Uint8: return reflect.ValueOf(uint8(src.Uint())) case reflect.Uint16: return reflect.ValueOf(uint16(src.Uint())) case reflect.Uint32: return reflect.ValueOf(uint32(src.Uint())) case reflect.Uint64: return reflect.ValueOf(src.Uint()) case reflect.Uintptr: return reflect.ValueOf(uintptr(src.Uint())) case reflect.Float32: return reflect.ValueOf(float32(src.Float())) case reflect.Float64: return reflect.ValueOf(src.Float()) case reflect.Complex64: return reflect.ValueOf(complex64(src.Complex())) case reflect.Complex128: return reflect.ValueOf(src.Complex()) case reflect.String: return reflect.ValueOf(src.String()) case reflect.Func: t := src.Type() if src.IsNil() { return reflect.Zero(t) } // Don't use this trick unless we 
have no choice. return forceClearROFlag(src) case reflect.UnsafePointer: return reflect.ValueOf(unsafe.Pointer(src.Pointer())) } panic(fmt.Errorf("go-clone: impossible type `%v` when cloning private field", src.Type())) } var typeOfInterface = reflect.TypeOf((*interface{})(nil)).Elem() // forceClearROFlag clears all RO flags in v to make v accessible. // It's a hack based on the fact that InterfaceData is always available on RO data. // This hack can be broken in any Go version. // Don't use it unless we have no choice, e.g. copying func in some edge cases. func forceClearROFlag(v reflect.Value) reflect.Value { var i interface{} indirect := 0 // Save flagAddr. for v.CanAddr() { v = v.Addr() indirect++ } v = v.Convert(typeOfInterface) nv := reflect.ValueOf(&i) *(*interfaceData)(unsafe.Pointer(nv.Pointer())) = parseReflectValue(v) cleared := nv.Elem().Elem() for indirect > 0 { cleared = cleared.Elem() indirect-- } return cleared } go-clone-1.6.0/structtype_sample_test.go000066400000000000000000000055541441225667700204250ustar00rootroot00000000000000// Copyright 2019 Huan Du. All rights reserved. // Licensed under the MIT license that can be found in the LICENSE file. package clone import ( "encoding/json" "fmt" "os" "reflect" ) func ExampleMarkAsScalar() { type ScalarType struct { stderr *os.File } MarkAsScalar(reflect.TypeOf(new(ScalarType))) scalar := &ScalarType{ stderr: os.Stderr, } cloned := Clone(scalar).(*ScalarType) // cloned is a shadow copy of scalar // so that the pointer value should be the same. fmt.Println(scalar.stderr == cloned.stderr) // Output: // true } func ExampleMarkAsOpaquePointer() { type OpaquePointerType struct { foo int } MarkAsOpaquePointer(reflect.TypeOf(new(OpaquePointerType))) opaque := &OpaquePointerType{ foo: 123, } cloned := Clone(opaque).(*OpaquePointerType) // cloned is a shadow copy of opaque. // so that opaque and cloned should be the same. 
fmt.Println(opaque == cloned) // Output: // true } func ExampleSetCustomFunc() { type MyStruct struct { Data []interface{} } // Filter nil values in Data when cloning old value. SetCustomFunc(reflect.TypeOf(MyStruct{}), func(allocator *Allocator, old, new reflect.Value) { // The new is a zero value of MyStruct. // We can get its address to update it. value := new.Addr().Interface().(*MyStruct) // The old is guaranteed to be a MyStruct. // As old.CanAddr() may be false, we'd better to read Data field directly. data := old.FieldByName("Data") l := data.Len() for i := 0; i < l; i++ { val := data.Index(i) if val.IsNil() { continue } n := allocator.Clone(val).Interface() value.Data = append(value.Data, n) } }) slice := &MyStruct{ Data: []interface{}{ "abc", nil, 123, nil, }, } cloned := Clone(slice).(*MyStruct) fmt.Println(cloned.Data) // Output: // [abc 123] } func ExampleSetCustomFunc_partiallyClone() { type T struct { Value int } type MyStruct struct { S1 *T S2 string S3 int } SetCustomFunc(reflect.TypeOf(T{}), func(allocator *Allocator, old, new reflect.Value) { oldField := old.FieldByName("Value") newField := new.FieldByName("Value") newField.SetInt(oldField.Int() + 100) }) SetCustomFunc(reflect.TypeOf(MyStruct{}), func(allocator *Allocator, old, new reflect.Value) { // We can call allocator.Clone to clone the old value without worrying about dead loop. // This custom func is temporary disabled for the old value in allocator. 
new.Set(allocator.Clone(old)) oldField := old.FieldByName("S2") newField := new.FieldByName("S2") newField.SetString(oldField.String() + "_suffix") }) st := &MyStruct{ S1: &T{ Value: 1, }, S2: "abc", S3: 2, } cloned := Clone(st).(*MyStruct) data, _ := json.Marshal(st) fmt.Println(string(data)) data, _ = json.Marshal(cloned) fmt.Println(string(data)) // Output: // {"S1":{"Value":1},"S2":"abc","S3":2} // {"S1":{"Value":101},"S2":"abc_suffix","S3":2} } go-clone-1.6.0/structtype_test.go000066400000000000000000000130461441225667700170570ustar00rootroot00000000000000// Copyright 2019 Huan Du. All rights reserved. // Licensed under the MIT license that can be found in the LICENSE file. package clone import ( "crypto/elliptic" "reflect" "sync" "sync/atomic" "testing" "time" "unsafe" "github.com/huandu/go-assert" ) type NoPointer struct { Foo int Bar string } type WithPointer struct { foo map[string]string bar []int } func TestMarkAsScalar(t *testing.T) { a := assert.New(t) oldCnt := 0 newCnt := 0 a.Use(&oldCnt, &newCnt) // Count cache. defaultAllocator.cachedStructTypes.Range(func(key, value interface{}) bool { oldCnt++ return true }) // Add 2 valid types. MarkAsScalar(reflect.TypeOf(new(NoPointer))) MarkAsScalar(reflect.TypeOf(new(WithPointer))) MarkAsScalar(reflect.TypeOf(new(int))) // Should be ignored. // Count cache against. defaultAllocator.cachedStructTypes.Range(func(key, value interface{}) bool { newCnt++ return true }) a.Assert(oldCnt+2 == newCnt) // As WithPointer is marked as scalar, Clone returns a shadow copy. value := &WithPointer{ foo: map[string]string{ "key": "value", }, bar: []int{1, 2, 3}, } cloned := Clone(value).(*WithPointer) a.Use(&value, &cloned) // cloned is a shadow copy. 
a.Equal(value, cloned) value.foo["key"] = "modified" value.bar[1] = 2000 a.Equal(value, cloned) } type MapKeys struct { mb map[bool]interface{} mi map[int]interface{} mi8 map[int8]interface{} mi16 map[int16]interface{} mi32 map[int32]interface{} mi64 map[int64]interface{} mui map[uint]interface{} mu8 map[uint8]interface{} mu16 map[uint16]interface{} mu32 map[uint32]interface{} mu64 map[uint64]interface{} muintptr map[uintptr]interface{} ms map[string]interface{} mf32 map[float32]interface{} mf64 map[float64]interface{} mc64 map[complex64]interface{} mc128 map[complex128]interface{} miface map[interface{}]interface{} mis map[Simple]interface{} // misp map[*Simple]interface{} munsafe map[unsafe.Pointer]interface{} } func TestCopyScalarValue(t *testing.T) { a := assert.New(t) st := &MapKeys{ mb: map[bool]interface{}{true: 2}, mi: map[int]interface{}{-1: 2}, mi8: map[int8]interface{}{-8: 2}, mi16: map[int16]interface{}{-16: 2}, mi32: map[int32]interface{}{-32: 2}, mi64: map[int64]interface{}{-64: 2}, mui: map[uint]interface{}{1: 2}, mu8: map[uint8]interface{}{8: 2}, mu16: map[uint16]interface{}{16: 2}, mu32: map[uint32]interface{}{32: 2}, mu64: map[uint64]interface{}{64: 2}, muintptr: map[uintptr]interface{}{0xDEADC0DE: 2}, ms: map[string]interface{}{"str": 2}, mf32: map[float32]interface{}{3.2: 2}, mf64: map[float64]interface{}{6.4: 2}, mc64: map[complex64]interface{}{complex(6, 4): 2}, mc128: map[complex128]interface{}{complex(1.2, 8): 2}, miface: map[interface{}]interface{}{"iface": 2}, mis: map[Simple]interface{}{{Foo: 123}: 2}, munsafe: map[unsafe.Pointer]interface{}{unsafe.Pointer(t): 2}, } cloned := Clone(st).(*MapKeys) a.Equal(st, cloned) } type noCopyValues struct { syncMutex sync.Mutex syncRWMutex sync.RWMutex syncWaitGroup sync.WaitGroup syncCond *sync.Cond syncPool sync.Pool syncMap sync.Map syncOnce sync.Once atomicValue atomic.Value } func TestCloneNoCopyValues(t *testing.T) { a := assert.New(t) v := &noCopyValues{ syncCond: sync.NewCond(func() 
*sync.Mutex { return &sync.Mutex{} }()), syncPool: sync.Pool{ New: func() interface{} { return "pool" }, }, } v.syncMutex.Lock() defer v.syncMutex.Unlock() v.syncRWMutex.RLock() defer v.syncRWMutex.RUnlock() v.syncWaitGroup.Add(1) defer v.syncWaitGroup.Done() v.syncCond.L.Lock() defer v.syncCond.L.Unlock() poolValue := v.syncPool.Get() v.syncPool.Put(poolValue) v.syncMap.Store("foo", "bar") v.syncOnce.Do(func() {}) v.atomicValue.Store("value") cloned := Clone(v).(*noCopyValues) done := make(chan bool, 1) ticker := time.NewTicker(100 * time.Millisecond) defer ticker.Stop() a.Run("race", func(t *testing.T) { a := assert.New(t) cloned.syncMutex.Lock() _ = 0 cloned.syncMutex.Unlock() cloned.syncRWMutex.RLock() _ = 0 cloned.syncRWMutex.RUnlock() cloned.syncWaitGroup.Add(1) cloned.syncWaitGroup.Done() cloned.syncWaitGroup.Wait() cloned.syncCond.L.Lock() _ = 0 cloned.syncCond.L.Unlock() poolValue := cloned.syncPool.Get() a.Equal(poolValue, "pool") mapValue, ok := cloned.syncMap.Load("foo") a.Equal(mapValue, "bar") a.Assert(ok) onceValueShouldBeTrue := true cloned.syncOnce.Do(func() { onceValueShouldBeTrue = false }) a.Assert(onceValueShouldBeTrue) value := cloned.atomicValue.Load() a.Equal(value, "value") done <- true }) select { case <-done: case <-ticker.C: a.Fatalf("unexpected lock is detected.") } } func TestCloneCurveAsScalar(t *testing.T) { a := assert.New(t) curves := []elliptic.Curve{elliptic.P224(), elliptic.P256(), elliptic.P384(), elliptic.P521()} cloned := Clone(curves).([]elliptic.Curve) for i, curve := range curves { c := cloned[i] a.Assert(curve == c) } } type testOpaquePointer struct { foo int } func TestMarkAsOpaquePointer(t *testing.T) { a := assert.New(t) // Mark *testOpaquePointer as opaque pointer. MarkAsOpaquePointer(reflect.TypeOf(&testOpaquePointer{})) // No-op if set a struct type as opaque. 
MarkAsOpaquePointer(reflect.TypeOf(testOpaquePointer{})) opaque := &testOpaquePointer{ foo: 1234, } cloned := Clone(&opaque).(**testOpaquePointer) a.Assert(&opaque != cloned) a.Assert(opaque == *cloned) } go-clone-1.6.0/wrapper.go000066400000000000000000000076621441225667700152610ustar00rootroot00000000000000// Copyright 2019 Huan Du. All rights reserved. // Licensed under the MIT license that can be found in the LICENSE file. package clone import ( "encoding/binary" "hash/crc64" "reflect" "sync" "unsafe" ) var ( sizeOfChecksum = unsafe.Sizeof(uint64(0)) crc64Table = crc64.MakeTable(crc64.ECMA) cachedWrapperTypes sync.Map ) // Wrap creates a wrapper of v, which must be a pointer. // If v is not a pointer, Wrap simply returns v and do nothing. // // The wrapper is a deep clone of v's value. It holds a shadow copy to v internally. // // t := &T{Foo: 123} // v := Wrap(t).(*T) // v is a clone of t. // reflect.DeepEqual(t, v) == true // v equals t. // v.Foo = 456 // v.Foo is changed, but t.Foo doesn't change. // orig := Unwrap(v) // Use `Unwrap` to discard wrapper and return original value, which is t. // orig.(*T) == t // orig and t is exactly the same. // Undo(v) // Use `Undo` to discard any change on v. // v.Foo == t.Foo // Now, the value of v and t are the same again. 
func Wrap(v interface{}) interface{} {
	if v == nil {
		return v
	}

	val := reflect.ValueOf(v)
	pt := val.Type()

	// Non-pointer values cannot be wrapped; hand them back untouched.
	if val.Kind() != reflect.Ptr {
		return v
	}

	t := pt.Elem()
	elem := val.Elem()
	ptr := unsafe.Pointer(val.Pointer())
	cache, ok := cachedWrapperTypes.Load(t)

	if !ok {
		// Build a wrapper struct layout: the embedded value T first, then a
		// checksum, then a pointer back to the original value. The offset
		// arithmetic below depends on exactly this field order.
		//
		// NOTE(review): two goroutines may both miss the cache and call
		// reflect.StructOf; presumably harmless since identical definitions
		// yield the identical type — confirm before relying on identity.
		cache = reflect.StructOf([]reflect.StructField{
			{
				Name:      "T",
				Type:      t,
				Anonymous: true,
			},
			{
				Name: "Checksum",
				Type: reflect.TypeOf(uint64(0)),
			},
			{
				Name: "Origin",
				Type: pt,
			},
		})
		cachedWrapperTypes.Store(t, cache)
	}

	wrapperType := cache.(reflect.Type)
	pw := defaultAllocator.New(wrapperType)

	wrapperPtr := unsafe.Pointer(pw.Pointer())
	wrapper := pw.Elem()

	// Equivalent code: wrapper.T = Clone(v)
	field := wrapper.Field(0)
	field.Set(heapCloneState.clone(elem))

	// Equivalent code: wrapper.Checksum = makeChecksum(v)
	checksumPtr := unsafe.Pointer((uintptr(wrapperPtr) + t.Size()))
	*(*uint64)(checksumPtr) = makeChecksum(t, uintptr(wrapperPtr), uintptr(ptr))

	// Equivalent code: wrapper.Origin = v
	originPtr := unsafe.Pointer((uintptr(wrapperPtr) + t.Size() + sizeOfChecksum))
	*(*uintptr)(originPtr) = uintptr(ptr)

	// Return a pointer to the embedded T so the caller sees the same type
	// as v while the checksum and origin ride along behind it in memory.
	return field.Addr().Interface()
}

// validateChecksum reports whether ptr points at the embedded T field of a
// wrapper created by Wrap. The checksum sits right behind T at offset
// t.Size() and must match the checksum recomputed from the wrapper address
// and the stored origin address.
func validateChecksum(t reflect.Type, ptr unsafe.Pointer) bool {
	pw := uintptr(ptr)
	orig := uintptr(getOrigin(t, ptr))
	checksum := *(*uint64)(unsafe.Pointer(uintptr(ptr) + t.Size()))
	expected := makeChecksum(t, pw, orig)
	return checksum == expected
}

// makeChecksum computes the CRC-64 (ECMA) checksum over the varint encodings
// of the wrapper address pw and the original value address orig.
// The t parameter is not used in the computation; it is kept for signature
// symmetry with validateChecksum.
func makeChecksum(t reflect.Type, pw uintptr, orig uintptr) uint64 {
	var data [binary.MaxVarintLen64 * 2]byte
	binary.PutUvarint(data[:binary.MaxVarintLen64], uint64(pw))
	binary.PutUvarint(data[binary.MaxVarintLen64:], uint64(orig))
	return crc64.Checksum(data[:], crc64Table)
}

// getOrigin reads a wrapper's Origin field — the pointer to the original
// value — located behind the embedded T field and the checksum.
func getOrigin(t reflect.Type, ptr unsafe.Pointer) unsafe.Pointer {
	return *(*unsafe.Pointer)(unsafe.Pointer(uintptr(ptr) + t.Size() + sizeOfChecksum))
}

// Unwrap returns v's original value if v is a wrapped value.
// Otherwise, simply returns v itself.
func Unwrap(v interface{}) interface{} {
	if v == nil {
		return v
	}

	val := reflect.ValueOf(v)

	// Plain values and non-wrapped pointers pass through unchanged.
	if !isWrapped(val) {
		return v
	}

	origVal := origin(val)
	return origVal.Interface()
}

// origin returns the original value recorded in a wrapper as a pointer
// value. val must already be known to be a wrapped pointer (see isWrapped).
func origin(val reflect.Value) reflect.Value {
	pt := val.Type()
	t := pt.Elem()
	ptr := unsafe.Pointer(val.Pointer())
	orig := getOrigin(t, ptr)
	origVal := reflect.NewAt(t, orig)
	return origVal
}

// Undo discards any change made in wrapped value.
// If v is not a wrapped value, nothing happens.
func Undo(v interface{}) {
	if v == nil {
		return
	}

	val := reflect.ValueOf(v)

	if !isWrapped(val) {
		return
	}

	origVal := origin(val)
	elem := val.Elem()

	// Deep-clone the original again so future edits to the wrapper still
	// cannot leak back into the original value.
	elem.Set(heapCloneState.clone(origVal.Elem()))
}

// isWrapped reports whether val points into a wrapper struct created by
// Wrap. It distinguishes wrappers from plain pointers by validating the
// checksum stored behind the pointed-to value.
func isWrapped(val reflect.Value) bool {
	pt := val.Type()

	if pt.Kind() != reflect.Ptr {
		return false
	}

	t := pt.Elem()
	ptr := unsafe.Pointer(val.Pointer())
	return validateChecksum(t, ptr)
}
go-clone-1.6.0/wrapper_benchmark_test.go000066400000000000000000000015241441225667700203210ustar00rootroot00000000000000// Copyright 2019 Huan Du. All rights reserved.
// Licensed under the MIT license that can be found in the LICENSE file.

package clone

import "testing"

// BenchmarkUnwrap measures resolving a wrapped value back to its original
// pointer; the fixture is wrapped once outside the timed loop.
func BenchmarkUnwrap(b *testing.B) {
	orig := &testType{
		Foo: "abcd",
		Bar: map[string]interface{}{
			"def": 123,
			"ghi": 78.9,
		},
		Player: []float64{
			12.3, 45.6, -78.9,
		},
	}
	wrapped := Wrap(orig)
	b.ResetTimer()

	for i := 0; i < b.N; i++ {
		Unwrap(wrapped)
	}
}

// BenchmarkSimpleWrap measures wrapping a small flat struct with scalar
// fields only.
func BenchmarkSimpleWrap(b *testing.B) {
	orig := &testSimple{
		Foo: 123,
		Bar: "abcd",
	}
	b.ResetTimer()

	for i := 0; i < b.N; i++ {
		Wrap(orig)
	}
}

// BenchmarkComplexWrap measures wrapping a struct whose map and slice
// fields must each be deep-cloned.
func BenchmarkComplexWrap(b *testing.B) {
	orig := &testType{
		Foo: "abcd",
		Bar: map[string]interface{}{
			"def": 123,
			"ghi": 78.9,
		},
		Player: []float64{
			12.3, 45.6, -78.9,
		},
	}
	b.ResetTimer()

	for i := 0; i < b.N; i++ {
		Wrap(orig)
	}
}
go-clone-1.6.0/wrapper_sample_test.go000066400000000000000000000016371441225667700176530ustar00rootroot00000000000000// Copyright 2019 Huan Du. All rights reserved.
// Licensed under the MIT license that can be found in the LICENSE file.
package clone

import "fmt"

// ExampleWrap demonstrates the Wrap / Unwrap / Undo workflow on a pointer
// value. The // Output: comment below is verified by `go test`.
func ExampleWrap() {
	// Suppose we have a type T defined as following.
	//     type T struct {
	//         Foo int
	//     }
	v := &T{
		Foo: 123,
	}
	w := Wrap(v).(*T) // Wrap value to protect it.

	// Use w freely. The type of w is the same as that of v.

	// It's OK to modify w. The change will not affect v.
	w.Foo = 456
	fmt.Println(w.Foo) // 456
	fmt.Println(v.Foo) // 123

	// Once we need the original value stored in w, call `Unwrap`.
	orig := Unwrap(w).(*T)
	fmt.Println(orig == v) // true
	fmt.Println(orig.Foo)  // 123

	// Or, we can simply undo any change made in w.
	// Note that `Undo` is significantly slower than `Unwrap`, thus
	// the latter is always preferred.
	Undo(w)
	fmt.Println(w.Foo) // 123

	// Output:
	// 456
	// 123
	// true
	// 123
	// 123
}
go-clone-1.6.0/wrapper_test.go000066400000000000000000000044231441225667700163100ustar00rootroot00000000000000// Copyright 2019 Huan Du. All rights reserved.
// Licensed under the MIT license that can be found in the LICENSE file.

package clone

import (
	"testing"

	"github.com/huandu/go-assert"
)

// testType is a fixture with nested map and slice fields that Wrap must
// deep-clone.
type testType struct {
	Foo    string
	Bar    map[string]interface{}
	Player []float64
}

// testSimple is a flat fixture with scalar fields only.
type testSimple struct {
	Foo int
	Bar string
}

// TestWrap verifies that a wrapped value is a deep clone — mutations on the
// wrapper are invisible to the original — and that Unwrap returns the very
// same original pointer.
func TestWrap(t *testing.T) {
	a := assert.New(t)
	a.Equal(Wrap(nil), nil)

	orig := &testType{
		Foo: "abcd",
		Bar: map[string]interface{}{
			"def": 123,
			"ghi": 78.9,
		},
		Player: []float64{
			12.3, 45.6, -78.9,
		},
	}
	wrapped := Wrap(orig).(*testType)
	a.Use(&orig, &wrapped)
	a.Equal(orig, wrapped)

	// Wrapping an already-wrapped value returns it unchanged.
	a.Equal(Wrap(wrapped), wrapped)

	// Mutate every field kind on the wrapper; the original must not move.
	wrapped.Foo = "xyz"
	wrapped.Bar["ghi"] = 98.7
	wrapped.Player[1] = 65.4
	a.Equal(orig.Foo, "abcd")
	a.Equal(orig.Bar["ghi"], 78.9)
	a.Equal(orig.Player[1], 45.6)

	actual := Unwrap(wrapped).(*testType)
	a.Assert(orig == actual)
}

// TestWrapScalarPtr verifies Wrap/Unwrap on a pointer to a plain scalar.
func TestWrapScalarPtr(t *testing.T) {
	a := assert.New(t)
	i := 123
	c := &i
	v := Wrap(c).(*int)
	orig := Unwrap(v).(*int)
	a.Use(&a, &i, &c, &v)

	a.Assert(*v == *c)
	a.Assert(orig == c)
}

// TestWrapNonePtr verifies that non-pointer values pass through Wrap
// untouched.
func TestWrapNonePtr(t *testing.T) {
	a := assert.New(t)
	cases := []interface{}{
		123, nil, "abcd",
		[]string{"ghi"},
		map[string]int{"xyz": 123},
	}

	for _, c := range cases {
		v := Wrap(c)
		a.Equal(c, v)
	}
}

// TestUnwrapValueWhichIsNotWrapped verifies that Unwrap on a plain pointer
// is a pass-through: the returned pointer aliases the input.
func TestUnwrapValueWhichIsNotWrapped(t *testing.T) {
	a := assert.New(t)
	s := &testType{
		Foo: "abcd",
		Bar: map[string]interface{}{
			"def": 123,
			"ghi": 78.9,
		},
		Player: []float64{
			12.3, 45.6, -78.9,
		},
	}
	v := Unwrap(s).(*testType)

	// Writing through v must be visible through s — same underlying value.
	v.Foo = "xyz"
	a.Equal(s, v)
}

// TestUnwrapPlainValueWhichIsNotWrapped verifies that Unwrap and Undo are
// no-ops for plain values and non-wrapped pointers.
func TestUnwrapPlainValueWhichIsNotWrapped(t *testing.T) {
	a := assert.New(t)
	i := 0
	cases := []interface{}{
		123, "abc", nil, &i,
	}

	for _, c := range cases {
		v := Unwrap(c)
		a.Equal(c, v)

		old := c
		Undo(c)
		a.Equal(c, old)
	}
}

// TestUndo verifies that Undo restores a modified wrapper to the state of
// the original value.
func TestUndo(t *testing.T) {
	a := assert.New(t)
	orig := &testType{
		Foo: "abcd",
		Bar: map[string]interface{}{
			"def": 123,
			"ghi": 78.9,
		},
		Player: []float64{
			12.3, 45.6, -78.9,
		},
	}
	wrapped := Wrap(orig).(*testType)
	a.Use(&orig, &wrapped)

	wrapped.Foo = "xyz"
	wrapped.Bar["ghi"] = 98.7
	wrapped.Player[1] = 65.4
	a.Equal(orig.Foo, "abcd")
	a.Equal(orig.Bar["ghi"], 78.9)
	a.Equal(orig.Player[1], 45.6)

	Undo(wrapped)
	a.Equal(orig, wrapped)
}