dudeLite: Range: a simple little numeric range iterator; Yet another solution: but with cats added

TLDR;

code

var u8_1to5 = Range(i8,1,5) {}; // Decimal into signed 8 bit
while ( u8_1to5.step( 1) ) |index| { std.debug.print("Number={} Type={any},  ",.{index, @TypeOf(index)}); }

output

Number=1 Type=i8,  Number=2 Type=i8,  Number=3 Type=i8,  Number=4 Type=i8,  Number=5 Type=i8,

or

var utf8_catface = Range(u21,'😺','😾'){}; // Unicode encoding
while ( utf8_catface.step(1) ) |index| { std.debug.print("{u} ",.{index}); }

output

😺 😻 😼 😽 😾

I hope Zig users are not all dog lovers :dog:

Anyway, :paw_prints::paw_prints::paw_prints: moving on

Stack Overflow has a question: Zig for loop range iterator with custom int type - Stack Overflow
And so did ziggit For-loop counter other than usize - #25 by mnemnion

The question was essentially

Question) A for-loop, with a range syntax of (0..10), will use a usize type. Can we make it u21?

for (0..10) |i| {} // `i` is `usize`

Initial Response 1) Why u21? TLDR answer; UTF-8 - See Example 3; UTF8_catface. :smiley_cat:

On with the show

This has resulted in much digital ink being spilled. There have been some excellent points made.
But I would like to draw your attention to Dude_the_Builder's excellent showcase called Range
here. Range: a simple little numeric range iterator

I decided to rewrite it. I was tempted to call it dudeLite rather than Range. Sanity prevailed.
The goal was to make it as small, readable, simple and fun as possible. No Floats or edge cases.

Tell me your thoughts. This is my first showcase.

Output

Number=1 Type=i8,  Number=2 Type=i8,  Number=3 Type=i8,  Number=4 Type=i8,  Number=5 Type=i8,
Number=5 Type=i8,  Number=4 Type=i8,  Number=3 Type=i8,  Number=2 Type=i8,  Number=1 Type=i8,
Z X V T R P N L J H F D B
😺 😻 😼 😽 😾
945 946 947 948 949 950 951 952 953 954 955 956 957 958 959 960 961 962 963 964 965 966 967 968 969
😾 😽 😼 😻 😺

Did something print below this?
Did something print above this?

Code

const std = @import("std");

pub fn main() void {

    ////////////////////////////////////////////////////////////
    // Example 1a - count 1 through 5 in a signed 8-bit integer.
    var signed_1to5 = Range(i8, 1, 5){}; // decimal literals into i8
    while (signed_1to5.step(1)) |index| {
        std.debug.print("Number={} Type={any},  ", .{ index, @TypeOf(index) });
    }
    std.debug.print("\n", .{});

    // Example 1b - 5,4,3,2,1 - the iterator resets itself when exhausted,
    // so the same instance from above can be walked again, downward this time.
    while (signed_1to5.step(-1)) |index| {
        std.debug.print("Number={} Type={any},  ", .{ index, @TypeOf(index) });
    }
    std.debug.print("\n", .{});

    ////////////////////////////////////////////////////////////
    // Example 2 - ASCII backwards, two at a time: Z, X, V, ...
    var ascii_letters = Range(u7, 'A', 'Z'){}; // 7 bits is all ASCII needs
    while (ascii_letters.step(-2)) |index| {
        std.debug.print("{c} ", .{index});
    }
    std.debug.print("\n", .{});

    // Example 3 - cat-face emoji. Meow! Purrrr
    var cat_faces = Range(u21, '😺', '😾'){}; // any Unicode codepoint fits in u21
    while (cat_faces.step(1)) |index| {
        std.debug.print("{u} ", .{index});
    }
    std.debug.print("\n", .{});

    // Example 4 - another alphabet, printed as plain numbers. It's all Greek to me.
    var greek_letters = Range(u16, 0x3b1, 0x3c9){}; // hex literals into u16
    while (greek_letters.step(1)) |index| {
        std.debug.print("{} ", .{index});
    }
    std.debug.print("\n", .{});

    ////////////////////////////////////////////////////////////
    // Ranges are reusable with a different step: the cat faces, reversed.
    while (cat_faces.step(-1)) |index| {
        std.debug.print("{u} ", .{index});
    }
    std.debug.print("\n", .{});

    // A step of 0 is not an endless loop - it yields nothing at all.
    std.debug.print("\nDid something print below this?\n", .{});
    while (signed_1to5.step(0)) |index| {
        std.debug.print("Number={} Type={any},  ", .{ index, @TypeOf(index) });
    }
    std.debug.print("Did something print above this?\n", .{});
}

/// Builds a tiny inclusive numeric range-iterator type at comptime.
/// `T` must be an integer type; `bottom` and `top` must fit in `T`
/// (the comptime `@intCast` in the field defaults enforces this).
/// Usage: `var r = Range(u8, 1, 5){};` then `while (r.step(1)) |v| { ... }`.
pub fn Range(comptime T: type, bottom: comptime_int, top: comptime_int) type {
    // Equal bounds give a one-element range; reversed bounds are a programmer
    // error - write Range(u8, 0, 10), not Range(u8, 10, 0).
    comptime std.debug.assert(top >= bottom);
    switch (@typeInfo(T)) {
        // NOTE(review): field is `.Int` on Zig <= 0.13; renamed `.int` in 0.14+ -
        // adjust to whichever compiler this targets.
        .Int => {
            return struct {
                rangeBottom: T = @intCast(bottom),
                rangeTop: T = @intCast(top),
                // null means "not started yet" (also: exhausted and reset).
                rangeLastVal: ?T = null,

                const Self = @This();

                /// Advances by the comptime-known `by` and returns the next value,
                /// or null when the range is exhausted (which also resets the
                /// iterator so the same instance can be walked again).
                /// Positive `by` starts at the bottom, negative `by` at the top,
                /// and `by == 0` yields nothing (a deliberate "skip this loop").
                pub fn step(self: *Self, by: comptime_int) ?T {
                    if (by == 0) return null; // comptime-resolved: loop body never runs
                    const last = self.rangeLastVal orelse {
                        // First call: begin at the end matching the direction.
                        const start: T = if (by > 0) self.rangeBottom else self.rangeTop;
                        self.rangeLastVal = start;
                        return start;
                    };
                    // Widen before adding so `last + by` cannot overflow T
                    // (e.g. Range(u8, 250, 255) with step(10) must end, not trap).
                    const next = @as(i256, last) + by;
                    if (next > self.rangeTop or next < self.rangeBottom) {
                        self.rangeLastVal = null; // exhausted: reset for reuse
                        return null;
                    }
                    const value: T = @intCast(next); // in [bottom, top], so it fits
                    self.rangeLastVal = value;
                    return value;
                }
            };
        },
        else => @compileError("!! Type needs to be an integer, e.g. i8 in zig> var u8_1to5 = Range(i8,1,5) {};"),
    }
}

3 Likes