Tags: c, ncurses, curses

How can I use the curses library to display colors calculated from the HSL model?


The HSL model is based on three components: hue, saturation, and lightness. Hue represents the color itself, such as red, green, or blue, and is expressed as an angle between 0 and 360 degrees. Saturation indicates the purity or vividness of the color and ranges from 0 to 100%. Lightness controls the brightness of the color, also ranging from 0 to 100%.
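For reference, the conversion used below follows the standard HSL-to-RGB formulas (nothing curses-specific here):

    C = (1 - |2L - 1|) * S                  (chroma)
    X = C * (1 - |((H / 60) mod 2) - 1|)    (intermediate component)
    m = L - C / 2                           (lightness offset)
    (R, G, B) = ((R' + m) * 255, (G' + m) * 255, (B' + m) * 255)

where (R', G', B') is picked from {(C,X,0), (X,C,0), (0,C,X), (0,X,C), (X,0,C), (C,0,X)} according to the 60-degree sector that H falls in.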
The result is:

[screenshot: the expected smooth color band]

But when I try to port the code to the curses library, I get totally different color bands.

[screenshot: wrong color bands under curses]
Here is the curses port I attempted, on pastebin.
The source below is the main program, which prints the band at the expected length. How can I get the same output with the curses library?

#include <stdio.h>
#include <stdlib.h>
#include <math.h>

typedef struct {
  int r;
  int g;
  int b;
} RGB;

RGB newRGB(int r, int g, int b) {
  RGB color;
  color.r = r;
  color.g = g;
  color.b = b;
  return color;
}

void calcColor(RGB* colors, int totalColors, int currentIndex, float saturation, float lightness);

int main() {
  int totalColors = 60;
  RGB colors[totalColors];
  float saturation = 1.0;
  float lightness = 0.3;

  int end = totalColors - 18; // index of the last color in the band
  int start = end - 42;       // index of the first color in the band

  for (int i = start; i <= end; i++) {
    calcColor(&colors[i], totalColors, i, saturation, lightness);
  }

  // Print the colors in a block
  for (int i = end; i >= start; i--) {
    printf("\033[48;2;%d;%d;%dm  \033[0m", colors[i].r, colors[i].g, colors[i].b);
  }

  return 0;
}

void calcColor(RGB* colors, int totalColors, int currentIndex, float saturation, float lightness) {
  float hue = currentIndex * (360.0 / totalColors);
  float chroma = (1 - fabs(2 * lightness - 1)) * saturation;
  float x = chroma * (1 - fabs(fmod(hue / 60, 2) - 1));
  float m = lightness - chroma / 2;

  float r, g, b;
  if (hue >= 0 && hue < 60) {
    r = chroma;
    g = x;
    b = 0;
  } else if (hue >= 60 && hue < 120) {
    r = x;
    g = chroma;
    b = 0;
  } else if (hue >= 120 && hue < 180) {
    r = 0;
    g = chroma;
    b = x;
  } else if (hue >= 180 && hue < 240) {
    r = 0;
    g = x;
    b = chroma;
  } else if (hue >= 240 && hue < 300) {
    r = x;
    g = 0;
    b = chroma;
  } else {
    r = chroma;
    g = 0;
    b = x;
  }

  colors->r = (r + m) * 255;
  colors->g = (g + m) * 255;
  colors->b = (b + m) * 255;
}
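
To build and run the ANSI version above (assuming gcc; the math library is needed for fabs and fmod, and the file name is just a placeholder):

    gcc hsl.c -o hsl -lm
    ./hsl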

Solution

  • One problem is that your code initializes color i+16 but then builds the pair with color i and turns on color pair i. The color, the pair, and the attribute must all use the same index, i+16:

    for (int i = end; i >= start; i--) {
        calcColor(&colors[i], totalColors, i, saturation, lightness);

        // Scale RGB components from 0..255 to the 0..1000 range curses expects
        int r = colors[i].r * 1000 / 255;
        int g = colors[i].g * 1000 / 255;
        int b = colors[i].b * 1000 / 255;

        // Define color i+16 and a pair whose background uses that same color
        init_color(i + 16, r, g, b);
        init_pair(i + 16, COLOR_BLACK, i + 16);

        // Print the block using the matching pair i+16
        attron(COLOR_PAIR(i + 16));
        printw("  ");
        attroff(COLOR_PAIR(i + 16));
    }
    

    The funny color bands are almost certainly due to your terminal emulator not supporting changing colors. See my comment under an earlier question. Try a different terminal emulator; this works in Xfce Terminal for me. A complete minimal program is sketched below the screenshot.

    [screenshot: correct color band in Xfce Terminal]
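
    For completeness, here is a minimal, self-contained sketch of the curses version. This is my own reconstruction rather than the exact pastebin code, and it assumes a terminal where can_change_color() returns true:

    #include <stdio.h>
    #include <math.h>
    #include <curses.h>

    typedef struct { int r, g, b; } RGB;

    /* Same HSL-to-RGB conversion as calcColor in the question. */
    static void calcColor(RGB *c, int totalColors, int i, float s, float l) {
        float h = i * (360.0f / totalColors);
        float C = (1 - fabsf(2 * l - 1)) * s;
        float X = C * (1 - fabsf(fmodf(h / 60, 2) - 1));
        float m = l - C / 2;
        float r, g, b;
        if      (h <  60) { r = C; g = X; b = 0; }
        else if (h < 120) { r = X; g = C; b = 0; }
        else if (h < 180) { r = 0; g = C; b = X; }
        else if (h < 240) { r = 0; g = X; b = C; }
        else if (h < 300) { r = X; g = 0; b = C; }
        else              { r = C; g = 0; b = X; }
        c->r = (r + m) * 255;
        c->g = (g + m) * 255;
        c->b = (b + m) * 255;
    }

    int main(void) {
        int totalColors = 60;
        RGB colors[60];
        int end = totalColors - 18, start = end - 42;

        initscr();
        start_color();
        if (!can_change_color()) {   // terminal must allow init_color()
            endwin();
            printf("This terminal cannot redefine colors.\n");
            return 1;
        }

        for (int i = end; i >= start; i--) {
            calcColor(&colors[i], totalColors, i, 1.0f, 0.3f);

            // curses wants components in 0..1000, not 0..255
            init_color(i + 16, colors[i].r * 1000 / 255,
                               colors[i].g * 1000 / 255,
                               colors[i].b * 1000 / 255);
            init_pair(i + 16, COLOR_BLACK, i + 16);

            attron(COLOR_PAIR(i + 16));
            printw("  ");
            attroff(COLOR_PAIR(i + 16));
        }

        refresh();
        getch();   // wait for a key before restoring the terminal
        endwin();
        return 0;
    }

    Build it with gcc file.c -lncurses -lm (again, the file name is a placeholder).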