I'm working with an AWT Canvas with a triple-buffered BufferStrategy. The application runs a loop that renders the screen at a set maximum FPS; specifically, it draws multiple characters on screen one at a time. I'm trying to measure how long rendering the characters takes, but the actual character drawing appears to take longer when the FPS cap is 60 than when it is 120. Here's an isolated test case:
import java.awt.Canvas;
import java.awt.Color;
import java.awt.Font;
import java.awt.Frame;
import java.awt.Graphics2D;
import java.awt.event.WindowAdapter;
import java.awt.event.WindowEvent;
import java.awt.image.BufferStrategy;

public class BufferStrategyTimingTest {
    private static final int TARGET_FPS = 60; // Try 60, 120 to see the issue
    private static final long TARGET_FRAME_TIME_NS = 1_000_000_000L / TARGET_FPS;
    private static final int COLS = 80;
    private static final int ROWS = 40;
    private static final int CHAR_WIDTH = 12;
    private static final int CHAR_HEIGHT = 20;
    private static final int CANVAS_WIDTH = COLS * CHAR_WIDTH;
    private static final int CANVAS_HEIGHT = ROWS * CHAR_HEIGHT;
    private static volatile boolean running = true;

    public static void main(String[] args) {
        Frame frame = new Frame("BufferStrategy Timing Test - Target FPS: " + TARGET_FPS);
        Canvas canvas = new Canvas();
        canvas.setSize(CANVAS_WIDTH, CANVAS_HEIGHT);
        frame.add(canvas);
        frame.pack();
        frame.setLocationRelativeTo(null);
        frame.addWindowListener(new WindowAdapter() {
            @Override
            public void windowClosing(WindowEvent e) {
                running = false;
                System.exit(0);
            }
        });
        frame.setVisible(true);

        // Initialize triple buffer strategy
        canvas.setIgnoreRepaint(true);
        canvas.createBufferStrategy(3);
        BufferStrategy bufferStrategy = canvas.getBufferStrategy();

        // Prepare font
        Font font = new Font("Monospaced", Font.PLAIN, 12);
        char[] charBuffer = new char[1];

        // Timing variables
        long frameCount = 0;
        long lastFpsCheckTime = System.nanoTime();
        long totalRenderUs = 0;
        long totalDrawCharsUs = 0;

        System.out.println("=== BufferStrategy Timing Test ===");
        System.out.println("Target FPS: " + TARGET_FPS);
        System.out.println("Frame budget: " + (TARGET_FRAME_TIME_NS / 1000) + " µs");
        System.out.println("Canvas: " + COLS + "x" + ROWS + " = " + (COLS * ROWS) + " characters");
        System.out.println("\nWaiting for stable FPS...\n");

        // Game loop
        while (running) {
            long frameStartTime = System.nanoTime();

            // Render
            long renderStartTime = System.nanoTime();
            long drawCharsStartTime = 0;
            long drawCharsEndTime = 0;
            do {
                do {
                    Graphics2D g2 = (Graphics2D) bufferStrategy.getDrawGraphics();
                    g2.setColor(Color.BLACK);
                    g2.fillRect(0, 0, CANVAS_WIDTH, CANVAS_HEIGHT);
                    g2.setFont(font);
                    g2.setColor(Color.WHITE);

                    // Time the actual drawChars calls
                    drawCharsStartTime = System.nanoTime();

                    // Draw characters one at a time
                    int fontAscent = g2.getFontMetrics().getAscent();
                    for (int y = 0; y < ROWS; y++) {
                        for (int x = 0; x < COLS; x++) {
                            charBuffer[0] = (char) (33 + ((x + y) % 94));
                            int cellX = x * CHAR_WIDTH;
                            int cellY = y * CHAR_HEIGHT;
                            g2.drawChars(charBuffer, 0, 1, cellX, cellY + fontAscent);
                        }
                    }
                    drawCharsEndTime = System.nanoTime();

                    g2.dispose();
                } while (bufferStrategy.contentsRestored());
                bufferStrategy.show();
            } while (bufferStrategy.contentsLost());
            long renderEndTime = System.nanoTime();

            long renderTimeUs = (renderEndTime - renderStartTime) / 1000;
            long drawCharsTimeUs = (drawCharsEndTime - drawCharsStartTime) / 1000;
            totalRenderUs += renderTimeUs;
            totalDrawCharsUs += drawCharsTimeUs;
            frameCount++;

            // Report every 60 frames
            if (frameCount % 60 == 0) {
                long currentTime = System.nanoTime();
                long elapsedNs = currentTime - lastFpsCheckTime;
                int actualFPS = (int) (60_000_000_000L / elapsedNs);
                lastFpsCheckTime = currentTime;

                long avgRenderUs = totalRenderUs / 60;
                long avgDrawCharsUs = totalDrawCharsUs / 60;
                System.out.printf("[%2d FPS] Total render: %5d µs | drawChars: %5d µs%n",
                        actualFPS, avgRenderUs, avgDrawCharsUs);
                totalRenderUs = 0;
                totalDrawCharsUs = 0;
            }

            // Frame rate limiting
            long frameTime = System.nanoTime() - frameStartTime;
            long sleepTime = TARGET_FRAME_TIME_NS - frameTime;
            if (sleepTime > 0) {
                try {
                    Thread.sleep(sleepTime / 1_000_000L, (int) (sleepTime % 1_000_000L));
                } catch (InterruptedException e) {
                    running = false;
                }
            }
        }
    }
}

Average printed results:
Linux Mint / GeForce RTX, 60 fps limit:        [59 FPS] Total render: 5250 µs | drawChars: 5250 µs
Linux Mint / GeForce RTX, 120 fps limit:       [59 FPS] Total render: 2000 µs | drawChars: 2000 µs
Windows 11 / Intel integrated, 60 fps limit:   [57 FPS] Total render: 8000 µs | drawChars: 4000 µs
Windows 11 / Intel integrated, 120 fps limit: [109 FPS] Total render: 6750 µs | drawChars: 3250 µs
These are rough averages from watching the console output. Total render time on Linux is actually always about ~25 µs higher than drawChars. The numbers are consistent to within ±1000 µs, with very occasional spikes up or down. Both systems have 60 Hz monitors.
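To rule out the running mean hiding something, I also considered reporting percentiles instead of averages. A minimal sketch of how that could replace the accumulators in the loop above (the class name, window size, and chosen percentiles are my own, not part of the original test):

import java.util.Arrays;

// Collects per-frame drawChars timings and reports median / 95th percentile
// instead of a mean, so occasional spikes don't skew the reported numbers.
public class PercentileTiming {
    private final long[] samplesUs;
    private int count = 0;

    public PercentileTiming(int capacity) {
        samplesUs = new long[capacity];
    }

    public void record(long micros) {
        if (count < samplesUs.length) {
            samplesUs[count++] = micros;
        }
    }

    public boolean isFull() {
        return count == samplesUs.length;
    }

    // Print the window's median and 95th percentile, then reset.
    public void report() {
        long[] window = Arrays.copyOf(samplesUs, count);
        Arrays.sort(window);
        long p50 = window[count / 2];
        long p95 = window[(int) (count * 0.95)];
        System.out.printf("drawChars p50: %d µs | p95: %d µs%n", p50, p95);
        count = 0;
    }
}

In the game loop this would be used as timing.record(drawCharsTimeUs); if (timing.isFull()) timing.report(); — the 60/120 gap shows up in the median as well, so it doesn't appear to be an averaging artifact.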
The Windows machine spends a significant amount of time in bufferStrategy.show(), which accounts for the gap between total render time and drawChars time, but that's not the topic of this question. While the gap is smaller than on Linux, the Windows machine also spends less time drawing the characters when the frame rate limit is higher. What could be causing this?
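For reference, the show() cost can be attributed separately with a small helper like this (a sketch of my own, names illustrative; it is not part of the test case above):

// Wraps a Runnable and accumulates its elapsed time in microseconds, so
// show(), fillRect() and drawChars() can each be attributed separately.
public final class SectionTimer {
    private long totalUs = 0;
    private int samples = 0;

    public void time(Runnable section) {
        long start = System.nanoTime();
        section.run();
        totalUs += (System.nanoTime() - start) / 1000;
        samples++;
    }

    // Average over the accumulated window, then reset.
    public long averageUsAndReset() {
        long avg = samples == 0 ? 0 : totalUs / samples;
        totalUs = 0;
        samples = 0;
        return avg;
    }
}

Used as showTimer.time(bufferStrategy::show) in the render loop, it confirms that on Windows most of the render/drawChars gap is spent inside show().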
