#include <stdio.h>
#include <string.h>

/*
 * Demonstrates that the same 4 bytes can be "interpreted" differently
 * depending on the printf conversion specifier (or the C type used).
 */
int main(void) {
  int x = 65;

  printf("x interpreted as... Hex: %x, Unsigned int: %u, Signed int: %d\n", x, x, x);

  // Try setting y to be (hexadecimal) 0x65
  int y = 0x65;

  printf("y interpreted as... Hex: %x, Unsigned int: %u, Signed int: %d\n", y, y, y);

  // Try setting z to something that makes signed and unsigned interpretations
  //    differ (Hint: int is 4 bytes)
  // 0xffffffff is all-ones: 4294967295 as unsigned, -1 as two's-complement signed.
  int z = 0xffffffff;

  printf("z interpreted as... Hex: %x, Unsigned int: %u, Signed int: %d\n", z, z, z);

  // Try printing x out as a char (%c), or float (%f)
  // The latter will probably make the compiler complain. Can you force it?
  printf("x interpreted as... char: %c\n", x); // ASCII: 65 is 'A'
  printf("x interpreted as... float (direct cast): %f\n", (float) x); // This one actually converts the number to the appropriate float

  // Reinterpret the raw bytes of x as a float. The original code did this with
  // *((float *)&x), which violates the strict-aliasing rule and is undefined
  // behavior; the portable, well-defined way to type-pun is memcpy.
  _Static_assert(sizeof(float) == sizeof(int), "type-pun assumes equal sizes");
  float xf;
  memcpy(&xf, &x, sizeof xf);
  printf("x interpreted as... float (cast pointer): %f\n", xf); // This one just interprets the bytes as a float
  printf("x interpreted as... float (cast pointer): %.50f\n", xf); // Same, but showing more digits

  return 0;
}