// SPDX-License-Identifier: GPL-2.0-only
/*
 * Copyright 2014, Michael Ellerman, IBM Corp.
 */

63752e453SMichael Ellerman #include <stdbool.h>
73752e453SMichael Ellerman #include <stdio.h>
83752e453SMichael Ellerman #include <stdlib.h>
93752e453SMichael Ellerman 
103752e453SMichael Ellerman #include "ebb.h"
113752e453SMichael Ellerman 
123752e453SMichael Ellerman 
133752e453SMichael Ellerman #define NUMBER_OF_EBBS	50
143752e453SMichael Ellerman 
/*
 * Test that if we overflow the counter while in the EBB handler, we take
 * another EBB on exiting from the handler.
 *
 * We do this by counting with a stupidly low sample period, causing us to
 * overflow the PMU while we're still in the EBB handler, leading to another
 * EBB.
 *
 * We get out of what would otherwise be an infinite loop by leaving the
 * counter frozen once we've taken enough EBBs.
 */

/*
 * EBB handler, installed via setup_ebb_handler() below.
 *
 * Distinguishes a genuine PMU EBB from a spurious one via BESCR, re-arms
 * PMC1 with the tiny sample period, and then deliberately does extra work
 * (reading and trace-logging SPRs) so the counter overflows again before
 * we return — producing the back-to-back EBB this test is checking for.
 */
static void ebb_callee(void)
{
	uint64_t siar, val;

	/* If PMEO (PMU Event Occurred) isn't set, this EBB wasn't ours */
	val = mfspr(SPRN_BESCR);
	if (!(val & BESCR_PMEO)) {
		ebb_state.stats.spurious++;
		goto out;
	}

	ebb_state.stats.ebb_count++;
	trace_log_counter(ebb_state.trace, ebb_state.stats.ebb_count);

	/* Resets the PMC */
	count_pmc(1, sample_period);

out:
	if (ebb_state.stats.ebb_count == NUMBER_OF_EBBS)
		/* Reset but leave counters frozen */
		reset_ebb_with_clear_mask(MMCR0_PMAO);
	else
		/* Unfreezes */
		reset_ebb();

	/* Do some stuff to chew some cycles and pop the counter */
	siar = mfspr(SPRN_SIAR);
	trace_log_reg(ebb_state.trace, SPRN_SIAR, siar);

	val = mfspr(SPRN_PMC1);
	trace_log_reg(ebb_state.trace, SPRN_PMC1, val);

	val = mfspr(SPRN_MMCR0);
	trace_log_reg(ebb_state.trace, SPRN_MMCR0, val);
}
613752e453SMichael Ellerman 
/*
 * Test body: open an EBB-enabled cycles event, install the handler, then
 * count with a very small sample period until NUMBER_OF_EBBS EBBs have
 * been taken. Returns 0 on success; FAIL_IF/SKIP_IF return early on
 * failure or unsupported hardware.
 */
int back_to_back_ebbs(void)
{
	struct event event;

	SKIP_IF(!ebb_is_supported());

	event_init_named(&event, 0x1001e, "cycles");
	event_leader_ebb_init(&event);

	/* Only count user cycles — kernel/HV/idle would skew the EBBs */
	event.attr.exclude_kernel = 1;
	event.attr.exclude_hv = 1;
	event.attr.exclude_idle = 1;

	FAIL_IF(event_open(&event));

	setup_ebb_handler(ebb_callee);

	FAIL_IF(ebb_event_enable(&event));

	/*
	 * Stupidly low period: the counter overflows again while we're
	 * still inside the handler (see file-top comment).
	 */
	sample_period = 5;

	/* Program PMC1 while frozen, then enable EBBs and unfreeze */
	ebb_freeze_pmcs();
	mtspr(SPRN_PMC1, pmc_sample_period(sample_period));
	ebb_global_enable();
	ebb_unfreeze_pmcs();

	/* Spin until the handler has seen enough EBBs */
	while (ebb_state.stats.ebb_count < NUMBER_OF_EBBS)
		FAIL_IF(core_busy_loop());

	ebb_global_disable();
	ebb_freeze_pmcs();

	dump_ebb_state();

	event_close(&event);

	FAIL_IF(ebb_state.stats.ebb_count != NUMBER_OF_EBBS);

	return 0;
}
1023752e453SMichael Ellerman 
main(void)1033752e453SMichael Ellerman int main(void)
1043752e453SMichael Ellerman {
1053752e453SMichael Ellerman 	return test_harness(back_to_back_ebbs, "back_to_back_ebbs");
1063752e453SMichael Ellerman }
107