Performance Optimisation
Overview
Section titled “Overview”

Optimise your Wails application for speed, memory efficiency, and responsiveness.
Frontend Optimisation
Section titled “Frontend Optimisation”

Bundle Size
Section titled “Bundle Size”

export default {
  build: {
    rollupOptions: {
      output: {
        manualChunks: {
          vendor: ['react', 'react-dom'],
        },
      },
    },
    minify: 'terser',
    terserOptions: {
      compress: {
        drop_console: true,
      },
    },
  },
}

Code Splitting
Section titled “Code Splitting”

// Lazy load components
const Settings = lazy(() => import('./Settings'))

function App() {
  return (
    <Suspense fallback={<Loading />}>
      <Settings />
    </Suspense>
  )
}

Asset Optimisation
Section titled “Asset Optimisation”

// Optimise images
import { defineConfig } from 'vite'
import imagemin from 'vite-plugin-imagemin'

export default defineConfig({
  plugins: [
    imagemin({
      gifsicle: { optimizationLevel: 3 },
      optipng: { optimizationLevel: 7 },
      svgo: { plugins: [{ removeViewBox: false }] },
    }),
  ],
})

Backend Optimisation
Section titled “Backend Optimisation”

Efficient Bindings
Section titled “Efficient Bindings”

// ❌ Bad: Return everything
func (s *Service) GetAllData() []Data {
    return s.db.FindAll() // Could be huge
}

// ✅ Good: Paginate
func (s *Service) GetData(page, size int) (*PagedData, error) {
    return s.db.FindPaged(page, size)
}

Caching
Section titled “Caching”

type CachedService struct {
    cache *lru.Cache
    ttl   time.Duration
}

func (s *CachedService) GetData(key string) (interface{}, error) {
    // Check cache
    if val, ok := s.cache.Get(key); ok {
        return val, nil
    }

    // Fetch and cache
    data, err := s.fetchData(key)
    if err != nil {
        return nil, err
    }

    s.cache.Add(key, data)
    return data, nil
}

Goroutines for Long Operations
Section titled “Goroutines for Long Operations”

func (s *Service) ProcessLargeFile(path string) error {
    // Process in background
    go func() {
        result, err := s.process(path)
        if err != nil {
            s.app.Event.Emit("process-error", err.Error())
            return
        }
        s.app.Event.Emit("process-complete", result)
    }()

    return nil
}

Memory Optimisation
Section titled “Memory Optimisation”

Avoid Memory Leaks
Section titled “Avoid Memory Leaks”

// ❌ Bad: Goroutine leak
func (s *Service) StartPolling() {
    ticker := time.NewTicker(1 * time.Second)
    go func() {
        for range ticker.C {
            s.poll()
        }
    }()
    // ticker never stopped!
}

// ✅ Good: Proper cleanup
func (s *Service) StartPolling() {
    ticker := time.NewTicker(1 * time.Second)
    s.stopChan = make(chan bool)

    go func() {
        for {
            select {
            case <-ticker.C:
                s.poll()
            case <-s.stopChan:
                ticker.Stop()
                return
            }
        }
    }()
}

func (s *Service) StopPolling() {
    close(s.stopChan)
}

Pool Resources
Section titled “Pool Resources”var bufferPool = sync.Pool{ New: func() interface{} { return new(bytes.Buffer) },}
func processData(data []byte) []byte { buf := bufferPool.Get().(*bytes.Buffer) defer bufferPool.Put(buf)
buf.Reset() buf.Write(data) // Process... return buf.Bytes()}Event Optimisation
Section titled “Event Optimisation”

Debounce Events
Section titled “Debounce Events”

// Debounce frequent events
let debounceTimer
function handleInput(value) {
  clearTimeout(debounceTimer)
  debounceTimer = setTimeout(() => {
    UpdateData(value)
  }, 300)
}

Batch Updates
Section titled “Batch Updates”

type BatchProcessor struct {
    items []Item
    mu    sync.Mutex
    timer *time.Timer
}

func (b *BatchProcessor) Add(item Item) {
    b.mu.Lock()
    defer b.mu.Unlock()

    b.items = append(b.items, item)

    if b.timer == nil {
        b.timer = time.AfterFunc(100*time.Millisecond, b.flush)
    }
}

func (b *BatchProcessor) flush() {
    b.mu.Lock()
    items := b.items
    b.items = nil
    b.timer = nil
    b.mu.Unlock()

    // Process batch
    processBatch(items)
}

Build Optimisation
Section titled “Build Optimisation”

Binary Size
Section titled “Binary Size”

# Strip debug symbols
wails3 build -ldflags "-s -w"

# Reduce binary size further
go build -ldflags="-s -w" -trimpath

Compilation Speed

Section titled “Compilation Speed”

# Use build cache
go build -buildmode=default

# Parallel compilation
go build -p 8

Profiling
Section titled “Profiling”

CPU Profiling
Section titled “CPU Profiling”

import "runtime/pprof"

func profileCPU() {
    f, _ := os.Create("cpu.prof")
    defer f.Close()

    pprof.StartCPUProfile(f)
    defer pprof.StopCPUProfile()

    // Code to profile
}

Memory Profiling
Section titled “Memory Profiling”

import "runtime/pprof"

func profileMemory() {
    f, _ := os.Create("mem.prof")
    defer f.Close()

    runtime.GC()
    pprof.WriteHeapProfile(f)
}

Analyse Profiles
Section titled “Analyse Profiles”

# View CPU profile
go tool pprof cpu.prof

# View memory profile
go tool pprof mem.prof

# Web interface
go tool pprof -http=:8080 cpu.prof

Best Practices
Section titled “Best Practices”

- Profile before optimising
- Cache expensive operations
- Use pagination for large datasets
- Debounce frequent events
- Pool resources
- Clean up goroutines
- Optimise bundle size
- Use lazy loading
❌ Don’t
Section titled “❌ Don’t”

- Don’t optimise prematurely
- Don’t ignore memory leaks
- Don’t block the main thread
- Don’t return huge datasets
- Don’t skip profiling
- Don’t forget cleanup
Performance Checklist
Section titled “Performance Checklist”

- Frontend bundle optimised
- Images compressed
- Code splitting implemented
- Backend methods paginated
- Caching implemented
- Goroutines cleaned up
- Events debounced
- Binary size optimised
- Profiling done
- Memory leaks fixed
Next Steps
Section titled “Next Steps”

- Architecture - Application architecture patterns
- Testing - Test your application
- Building - Build optimised binaries