Memory Management in Golang
Go provides automatic memory management through garbage collection, but understanding how memory works in Go is crucial for writing efficient programs. This tutorial covers Go’s memory model, garbage collection, and best practices for memory-efficient code.
Go’s Memory Model
Stack vs Heap Allocation
Go automatically decides where to allocate memory:
package main
import "fmt"
// Stack allocation (fast, automatic cleanup)
func stackFunction() {
x := 42 // Allocated on stack
y := "hello" // Allocated on stack
z := [3]int{1, 2, 3} // Small arrays often on stack
fmt.Println(x, y, z)
}
// Heap allocation (managed by GC)
func heapFunction() *int {
x := 42 // Might be allocated on heap if escaped
return &x // Address escapes to heap
}
func main() {
stackFunction()
ptr := heapFunction()
fmt.Println(*ptr)
}
Escape Analysis
Go’s compiler performs escape analysis to determine if variables should be allocated on the stack or heap.
# View escape analysis
go build -gcflags="-m" main.go
The compiler reports escaping variables, with output similar to:
./main.go:15: moved to heap: x
./main.go:16: &x escapes to heap
Garbage Collection
How GC Works
Go uses a concurrent, tri-color mark-and-sweep garbage collector.
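Because the collector runs concurrently with regular goroutines, pauses are typically short. Below is a minimal sketch of observing this via runtime/debug.ReadGCStats; the allocation loop is only there to make sure at least one collection cycle has run.
package main
import (
"fmt"
"runtime/debug"
)
func main() {
// Generate heap garbage so the collector has run at least once
var data [][]byte
for i := 0; i < 100; i++ {
data = append(data, make([]byte, 1024*1024)) // 1MB each
}
_ = data
var stats debug.GCStats
debug.ReadGCStats(&stats)
fmt.Printf("GC cycles: %d\n", stats.NumGC)
fmt.Printf("Total pause: %v\n", stats.PauseTotal)
fmt.Printf("Last GC at: %v\n", stats.LastGC)
}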
package main
import (
"runtime"
"time"
)
func main() {
// Force garbage collection
runtime.GC()
// Get GC statistics
var stats runtime.MemStats
runtime.ReadMemStats(&stats)
fmt.Printf("GC cycles: %d\n", stats.NumGC)
fmt.Printf("Allocated: %d bytes\n", stats.Alloc)
fmt.Printf("Total allocated: %d bytes\n", stats.TotalAlloc)
}
GC Tuning
// SetGCPercent lives in the runtime/debug package: import "runtime/debug"
// Set GC target percentage (default 100)
debug.SetGCPercent(50) // Run GC when the heap has grown 50% beyond the live heap left by the previous cycle
// Force GC
runtime.GC()
// Disable GC (not recommended for production)
debug.SetGCPercent(-1)
// Re-enable GC
debug.SetGCPercent(100)
Memory Statistics
package main
import (
"fmt"
"runtime"
"time"
)
func printMemStats() {
var m runtime.MemStats
runtime.ReadMemStats(&m)
fmt.Printf("Alloc = %v MiB", m.Alloc/1024/1024)
fmt.Printf("\tTotalAlloc = %v MiB", m.TotalAlloc/1024/1024)
fmt.Printf("\tSys = %v MiB", m.Sys/1024/1024)
fmt.Printf("\tNumGC = %v\n", m.NumGC)
}
func main() {
printMemStats()
// Allocate some memory
var data [][]byte
for i := 0; i < 100; i++ {
data = append(data, make([]byte, 1024*1024)) // 1MB each
}
printMemStats()
runtime.GC()
printMemStats()
}
Memory Leaks
Common Causes of Memory Leaks
- Goroutines that never exit
- Global variables holding references
- Unclosed resources (files, tickers, HTTP response bodies; see the ticker example below)
- Subslices that keep large backing arrays alive (Go's tracing GC does handle circular references, so those alone do not leak)
// Memory leak: goroutine never exits
func leakyGoroutine() {
ch := make(chan int)
go func() {
for {
select {
case <-ch:
return
default:
// Do work but never exit
}
}
}()
// ch is never sent to, goroutine leaks
}
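The unclosed-resources case from the list above looks similar. The following is a hedged sketch (it assumes the "time" package is imported; before Go 1.23 an unstopped time.Ticker is never reclaimed by the GC, and stopping tickers remains good practice on newer versions):
// Memory leak: ticker never stopped, goroutine never exits
func leakyTicker() {
t := time.NewTicker(time.Second)
go func() {
for range t.C {
// Periodic work; t.Stop() is never called, so the timer and this goroutine live forever
}
}()
}
// Fixed: stop the ticker and give the goroutine an exit path
func fixedTicker(done <-chan struct{}) {
t := time.NewTicker(time.Second)
go func() {
defer t.Stop()
for {
select {
case <-t.C:
// Periodic work
case <-done:
return
}
}
}()
}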
// Memory leak: global map
var cache = make(map[string][]byte)
func addToCache(key string, data []byte) {
cache[key] = data // Data never cleaned up
}
// Fixed version with size limit
const maxCacheSize = 100
func addToCacheFixed(key string, data []byte) {
if len(cache) >= maxCacheSize {
// Evict an arbitrary entry (map iteration order is random); a real cache would use an LRU policy
for k := range cache {
delete(cache, k)
break
}
}
cache[key] = data
}
Detecting Memory Leaks
package main
import (
"runtime"
"time"
)
func monitorMemory() {
var m runtime.MemStats
for {
runtime.ReadMemStats(&m)
fmt.Printf("Alloc: %d KB, NumGC: %d\n", m.Alloc/1024, m.NumGC)
time.Sleep(time.Second)
}
}
func leakyFunction() {
var data [][]byte
for {
data = append(data, make([]byte, 1024*1024)) // 1MB
time.Sleep(time.Millisecond * 100)
}
}
func main() {
go monitorMemory()
leakyFunction()
}
Efficient Memory Usage
Object Pooling
package main
import "sync"
type ObjectPool struct {
pool sync.Pool
}
func NewObjectPool() *ObjectPool {
return &ObjectPool{
pool: sync.Pool{
New: func() interface{} {
return make([]byte, 1024) // 1KB buffer
},
},
}
}
func (op *ObjectPool) Get() []byte {
return op.pool.Get().([]byte)
}
func (op *ObjectPool) Put(buf []byte) {
// Reset buffer if needed
for i := range buf {
buf[i] = 0
}
op.pool.Put(buf)
}
func main() {
pool := NewObjectPool()
// Get buffer from pool
buf := pool.Get()
// Use buffer...
copy(buf, []byte("hello"))
// Return to pool
pool.Put(buf)
}
Slice Capacity Management
// Inefficient: grows slice multiple times
func inefficient() []int {
var result []int
for i := 0; i < 1000; i++ {
result = append(result, i) // May reallocate multiple times
}
return result
}
// Efficient: preallocate capacity
func efficient() []int {
result := make([]int, 0, 1000) // Preallocate capacity
for i := 0; i < 1000; i++ {
result = append(result, i) // No reallocation
}
return result
}
// Very efficient: known size
func mostEfficient() []int {
result := make([]int, 1000) // Exact size
for i := 0; i < 1000; i++ {
result[i] = i
}
return result
}
String Concatenation
import "strings"
// Inefficient for many concatenations
func badConcat(parts []string) string {
result := ""
for _, part := range parts {
result += part // Creates new string each time
}
return result
}
// Efficient
func goodConcat(parts []string) string {
var builder strings.Builder
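// Optional pre-sizing (a sketch, not part of the original example): when the total
// length is known, grow the builder once up front so the writes below never reallocate
total := 0
for _, part := range parts {
total += len(part)
}
builder.Grow(total)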
for _, part := range parts {
builder.WriteString(part)
}
return builder.String()
}
Memory Profiling
Creating Memory Profiles
package main
import (
"os"
"runtime/pprof"
)
func main() {
// Create memory profile file
f, err := os.Create("mem.prof")
if err != nil {
panic(err)
}
defer f.Close()
// Run your code here...
allocateMemory()
// Write heap profile
runtime.GC() // Force garbage collection
pprof.WriteHeapProfile(f)
}
func allocateMemory() {
var data [][]byte
for i := 0; i < 100; i++ {
data = append(data, make([]byte, 1024*1024)) // 1MB each
}
_ = data // Prevent optimization
}
Analyzing Memory Profiles
# Analyze profile
go tool pprof mem.prof
# Interactive commands:
# top - show memory usage by function
# list function_name - show source code
# web - generate web visualization
# png - generate PNG image
Runtime Memory Inspection
package main
import (
"fmt"
"runtime"
"unsafe"
)
func inspectMemory() {
// Report where this function was called from
_, file, line, _ := runtime.Caller(1)
fmt.Printf("Called from %s:%d\n", file, line)
// Get memory stats
var m runtime.MemStats
runtime.ReadMemStats(&m)
fmt.Printf("Heap: %d bytes\n", m.HeapAlloc)
fmt.Printf("Stack: %d bytes\n", m.StackInuse)
// Force GC and see difference
runtime.GC()
runtime.ReadMemStats(&m)
fmt.Printf("After GC - Heap: %d bytes\n", m.HeapAlloc)
}
func main() {
inspectMemory()
}
Advanced Memory Management
Finalizers
package main
import (
"fmt"
"runtime"
)
type Resource struct {
name string
}
func (r *Resource) Close() {
fmt.Printf("Closing resource: %s\n", r.name)
}
func newResource(name string) *Resource {
r := &Resource{name: name}
runtime.SetFinalizer(r, (*Resource).Close)
return r
}
func main() {
r := newResource("database")
_ = r // r is not used again, so it becomes unreachable and eligible for collection
// r.Close() is never called explicitly; the finalizer may run during a later GC cycle,
// but finalizers are not guaranteed to run before the program exits
runtime.GC()
runtime.GC() // Give the finalizer goroutine a chance to be scheduled
}
Memory-Mapped Files
Note that syscall.Mmap is available only on Unix-like systems.
package main
import (
"os"
"syscall"
)
func mmapFile(filename string) ([]byte, error) {
file, err := os.Open(filename)
if err != nil {
return nil, err
}
defer file.Close()
stat, err := file.Stat()
if err != nil {
return nil, err
}
// Memory map the file
data, err := syscall.Mmap(int(file.Fd()), 0, int(stat.Size()),
syscall.PROT_READ, syscall.MAP_SHARED)
if err != nil {
return nil, err
}
return data, nil
}
func munmapFile(data []byte) error {
return syscall.Munmap(data)
}
Best Practices
1. Minimize Allocations
// Bad: allocates new slice each time
func badFilter(numbers []int, predicate func(int) bool) []int {
var result []int
for _, n := range numbers {
if predicate(n) {
result = append(result, n) // May reallocate
}
}
return result
}
// Good: preallocate when possible
func goodFilter(numbers []int, predicate func(int) bool) []int {
result := make([]int, 0, len(numbers)) // Preallocate
for _, n := range numbers {
if predicate(n) {
result = append(result, n)
}
}
return result
}
2. Reuse Objects
// Object pool for expensive objects
var bufferPool = sync.Pool{
New: func() interface{} {
return make([]byte, 4096)
},
}
func processData(data []byte) {
buf := bufferPool.Get().([]byte)
defer bufferPool.Put(buf)
// Use buf...
copy(buf, data)
// Process buf
}
3. Be Careful with Closures
// Bad: closure captures large slice
func badClosure() {
largeSlice := make([]byte, 1024*1024) // 1MB
go func() {
// largeSlice is captured, can't be GC'd until goroutine exits
_ = largeSlice
}()
}
// Good: copy only the data you need so the large backing array can be collected
func goodClosure() {
largeSlice := make([]byte, 1024*1024)
needed := make([]byte, 100)
copy(needed, largeSlice[:100]) // A plain subslice would still pin the whole 1MB array
go func(data []byte) {
// Only the copied 100 bytes are retained by the goroutine
_ = data
}(needed)
}
4. Use Appropriate Data Structures
// For sparse data, use map instead of slice
sparseData := make(map[int]int) // Better than []int with many zeros
// For ordered data with frequent deletions, consider container/list
// or compact the slice in place (see the sketch below)
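A minimal in-place compaction sketch (the function name, element type, and predicate are illustrative); it drops unwanted elements without allocating a second backing array:
// compactInts keeps only elements that satisfy keep, reusing the original backing array
func compactInts(items []int, keep func(int) bool) []int {
n := 0
for _, v := range items {
if keep(v) {
items[n] = v
n++
}
}
// For element types containing pointers, also zero out items[n:]
// so the dropped values can be garbage collected
return items[:n]
}
The returned slice shares storage with the input, so the caller should stop using the original slice afterwards.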
5. Profile Memory Usage
// Add memory profiling to your application
import (
"log"
"net/http"
_ "net/http/pprof" // Registers /debug/pprof handlers on http.DefaultServeMux
)
func main() {
go func() {
log.Println(http.ListenAndServe("localhost:6060", nil))
}()
// Your application code...
}
6. Monitor GC Performance
func monitorGC() {
var lastGC uint32
for {
stats := runtime.MemStats{}
runtime.ReadMemStats(&stats)
if stats.NumGC > lastGC {
fmt.Printf("GC %d: %v bytes, %v ns\n",
stats.NumGC, stats.NextGC, stats.PauseTotalNs)
lastGC = stats.NumGC
}
time.Sleep(time.Second)
}
}
Common Memory Issues
1. Unintended Heap Allocations
// This causes heap allocation
func heapAlloc() *int {
x := 42
return &x // x escapes to heap
}
// This stays on stack
func stackAlloc() int {
x := 42
return x
}
2. Slice Append Gotchas
func sliceGotcha() {
s1 := make([]int, 3, 4) // len 3, cap 4
s1[0], s1[1], s1[2] = 1, 2, 3
s2 := append(s1, 4) // Fits within s1's spare capacity, so both slices share one backing array
s2[0] = 99 // Also visible through s1
fmt.Println(s1) // [99 2 3]
fmt.Println(s2) // [99 2 3 4]
}
3. Interface Boxing
// Interface{} boxing can cause allocations
func processInterface(v interface{}) {
// v is boxed, may cause heap allocation
_ = v
}
// Generic approach avoids boxing
func processGeneric[T any](v T) {
// Generics typically avoid boxing for value types
_ = v
}
4. String to Byte Conversions
// This allocates new byte slice
func badStringToBytes(s string) []byte {
return []byte(s) // Allocates
}
// Reuse buffer when possible
func goodStringToBytes(s string, buf []byte) []byte {
buf = buf[:0] // Reset length
buf = append(buf, s...)
return buf
}
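As a short usage sketch (the function and variable names are illustrative), reusing one buffer across iterations keeps the conversion from allocating once the buffer has grown large enough:
func convertMany(lines []string) {
var buf []byte
for _, line := range lines {
buf = goodStringToBytes(line, buf)
// Process buf here; the next iteration reuses and may overwrite it
}
}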