
Source file src/go.uber.org/zap/zapcore/sampler_bench_test.go

Documentation: go.uber.org/zap/zapcore

// Copyright (c) 2016 Uber Technologies, Inc.
//
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in
// all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
// THE SOFTWARE.

package zapcore_test

import (
	"fmt"
	"sync/atomic"
	"testing"
	"time"

	"github.com/stretchr/testify/assert"
	"go.uber.org/zap/internal/ztest"

	//revive:disable:dot-imports
	. "go.uber.org/zap/zapcore"
)
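
// counterTestCases holds sets of distinct message keys of increasing size
// (7, 50, and 100 entries) that the sampler benchmarks cycle through.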
var counterTestCases = [][]string{
	// some stuff I made up
	{
		"foo",
		"bar",
		"baz",
		"alpha",
		"bravo",
		"charlie",
		"delta",
	},

	// shuf -n50 /usr/share/dict/words
	{
		"unbracing",
		"stereotomy",
		"supranervian",
		"moaning",
		"exchangeability",
		"gunyang",
		"sulcation",
		"dariole",
		"archheresy",
		"synchronistically",
		"clips",
		"unsanctioned",
		"Argoan",
		"liparomphalus",
		"layship",
		"Fregatae",
		"microzoology",
		"glaciaria",
		"Frugivora",
		"patterist",
		"Grossulariaceae",
		"lithotint",
		"bargander",
		"opisthographical",
		"cacography",
		"chalkstone",
		"nonsubstantialism",
		"sardonicism",
		"calamiform",
		"lodginghouse",
		"predisposedly",
		"topotypic",
		"broideress",
		"outrange",
		"gingivolabial",
		"monoazo",
		"sparlike",
		"concameration",
		"untoothed",
		"Camorrism",
		"reissuer",
		"soap",
		"palaiotype",
		"countercharm",
		"yellowbird",
		"palterly",
		"writinger",
		"boatfalls",
		"tuglike",
		"underbitten",
	},

	// shuf -n100 /usr/share/dict/words
	{
		"rooty",
		"malcultivation",
		"degrade",
		"pseudoindependent",
		"stillatory",
		"antiseptize",
		"protoamphibian",
		"antiar",
		"Esther",
		"pseudelminth",
		"superfluitance",
		"teallite",
		"disunity",
		"spirignathous",
		"vergency",
		"myliobatid",
		"inosic",
		"overabstemious",
		"patriarchally",
		"foreimagine",
		"coetaneity",
		"hemimellitene",
		"hyperspatial",
		"aulophyte",
		"electropoion",
		"antitrope",
		"Amarantus",
		"smaltine",
		"lighthead",
		"syntonically",
		"incubous",
		"versation",
		"cirsophthalmia",
		"Ulidian",
		"homoeography",
		"Velella",
		"Hecatean",
		"serfage",
		"Spermaphyta",
		"palatoplasty",
		"electroextraction",
		"aconite",
		"avirulence",
		"initiator",
		"besmear",
		"unrecognizably",
		"euphoniousness",
		"balbuties",
		"pascuage",
		"quebracho",
		"Yakala",
		"auriform",
		"sevenbark",
		"superorganism",
		"telesterion",
		"ensand",
		"nagaika",
		"anisuria",
		"etching",
		"soundingly",
		"grumpish",
		"drillmaster",
		"perfumed",
		"dealkylate",
		"anthracitiferous",
		"predefiance",
		"sulphoxylate",
		"freeness",
		"untucking",
		"misworshiper",
		"Nestorianize",
		"nonegoistical",
		"construe",
		"upstroke",
		"teated",
		"nasolachrymal",
		"Mastodontidae",
		"gallows",
		"radioluminescent",
		"uncourtierlike",
		"phasmatrope",
		"Clunisian",
		"drainage",
		"sootless",
		"brachyfacial",
		"antiheroism",
		"irreligionize",
		"ked",
		"unfact",
		"nonprofessed",
		"milady",
		"conjecture",
		"Arctomys",
		"guapilla",
		"Sassenach",
		"emmetrope",
		"rosewort",
		"raphidiferous",
		"pooh",
		"Tyndallize",
	},
}
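
// BenchmarkSampler_Check measures the throughput of Check on a sampled core
// wrapping a JSON encoder that writes to a discarder. The sampler passes the
// first entry per message in each millisecond tick and every 1000th entry
// thereafter.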
func BenchmarkSampler_Check(b *testing.B) {
	for _, keys := range counterTestCases {
		b.Run(fmt.Sprintf("%v keys", len(keys)), func(b *testing.B) {
			fac := NewSamplerWithOptions(
				NewCore(
					NewJSONEncoder(testEncoderConfig()),
					&ztest.Discarder{},
					DebugLevel,
				),
				time.Millisecond, 1, 1000)
			b.ResetTimer()
			b.RunParallel(func(pb *testing.PB) {
				i := 0
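				// Cycle through the key set, rotating the level from Debug through Error.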
				for pb.Next() {
					ent := Entry{
						Level:   DebugLevel + Level(i%4),
						Message: keys[i],
					}
					_ = fac.Check(ent, nil)
					i++
					if n := len(keys); i >= n {
						i -= n
					}
				}
			})
		})
	}
}
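
// makeSamplerCountingHook returns a sampling hook together with atomic
// counters that the hook increments for dropped and sampled entries.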
func makeSamplerCountingHook() (func(_ Entry, dec SamplingDecision), *atomic.Int64, *atomic.Int64) {
	droppedCount := new(atomic.Int64)
	sampledCount := new(atomic.Int64)
	h := func(_ Entry, dec SamplingDecision) {
		if dec&LogDropped > 0 {
			droppedCount.Add(1)
		} else if dec&LogSampled > 0 {
			sampledCount.Add(1)
		}
	}
	return h, droppedCount, sampledCount
}
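
// BenchmarkSampler_CheckWithHook mirrors BenchmarkSampler_Check but registers
// the counting hook via SamplerHook, so every sampling decision is recorded,
// and sanity-checks the drop/sample ratio after each run.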
func BenchmarkSampler_CheckWithHook(b *testing.B) {
	hook, dropped, sampled := makeSamplerCountingHook()
	for _, keys := range counterTestCases {
		b.Run(fmt.Sprintf("%v keys", len(keys)), func(b *testing.B) {
			fac := NewSamplerWithOptions(
				NewCore(
					NewJSONEncoder(testEncoderConfig()),
					&ztest.Discarder{},
					DebugLevel,
				),
				time.Millisecond,
				1,
				1000,
				SamplerHook(hook),
			)
			b.ResetTimer()
			b.RunParallel(func(pb *testing.PB) {
				i := 0
				for pb.Next() {
					ent := Entry{
						Level:   DebugLevel + Level(i%4),
						Message: keys[i],
					}
					_ = fac.Check(ent, nil)
					i++
					if n := len(keys); i >= n {
						i -= n
					}
				}
			})
			// Per the sampler settings we expect roughly 1000 dropped messages for
			// every sampled one, with a delta because fewer than 1000 messages may
			// be dropped after the initial entry in a window is sampled.
			assert.Greater(b, dropped.Load()/1000, sampled.Load()-1000)
			dropped.Store(0)
			sampled.Store(0)
		})
	}
}
