14#include "internal/compar.h"
15#include "internal/enum.h"
16#include "internal/hash.h"
17#include "internal/imemo.h"
18#include "internal/numeric.h"
19#include "internal/object.h"
20#include "internal/proc.h"
21#include "internal/rational.h"
22#include "internal/re.h"
24#include "ruby_assert.h"
31static ID id__separator;
32static ID id_chunk_categorize;
33static ID id_chunk_enumerable;
34static ID id_sliceafter_enum;
35static ID id_sliceafter_pat;
36static ID id_sliceafter_pred;
37static ID id_slicebefore_enumerable;
38static ID id_slicebefore_sep_pat;
39static ID id_slicebefore_sep_pred;
40static ID id_slicewhen_enum;
41static ID id_slicewhen_inverted;
42static ID id_slicewhen_pred;
48#define id_lshift idLTLT
53rb_enum_values_pack(
int argc,
const VALUE *argv)
55 if (argc == 0)
return Qnil;
56 if (argc == 1)
return argv[0];
60#define ENUM_WANT_SVALUE() do { \
61 i = rb_enum_values_pack(argc, argv); \
65enum_yield(
int argc,
VALUE ary)
68 return rb_yield_force_blockarg(ary);
75enum_yield_array(
VALUE ary)
80 return rb_yield_force_blockarg(ary);
89 struct MEMO *memo = MEMO_CAST(args);
92 if (
RTEST(rb_funcallv(memo->v1, id_eqq, 1, &i)) ==
RTEST(memo->u3.value)) {
93 rb_ary_push(memo->v2, i);
101 struct MEMO *memo = MEMO_CAST(args);
102 VALUE converted_element, match;
107 match =
NIL_P(converted_element) ?
Qfalse : rb_reg_match_p(memo->v1, i, 0);
108 if (match == memo->u3.value) {
109 rb_ary_push(memo->v2, i);
117 struct MEMO *memo = MEMO_CAST(args);
120 if (
RTEST(rb_funcallv(memo->v1, id_eqq, 1, &i)) ==
RTEST(memo->u3.value)) {
121 rb_ary_push(memo->v2, enum_yield(argc, i));
129 VALUE ary = rb_ary_new();
130 struct MEMO *memo = MEMO_NEW(pat, ary, test);
136 LIKELY(rb_method_basic_definition_p(
CLASS_OF(pat), idEqq))) {
175 return enum_grep0(obj, pat,
Qtrue);
207 return enum_grep0(obj, pat,
Qfalse);
210#define COUNT_BIGNUM IMEMO_FL_USER0
211#define MEMO_V3_SET(m, v) RB_OBJ_WRITE((m), &(m)->u3.value, (v))
214imemo_count_up(
struct MEMO *memo)
216 if (memo->flags & COUNT_BIGNUM) {
217 MEMO_V3_SET(memo, rb_int_succ(memo->u3.value));
219 else if (++memo->u3.cnt == 0) {
221 unsigned long buf[2] = {0, 1};
222 MEMO_V3_SET(memo, rb_big_unpack(buf, 2));
223 memo->flags |= COUNT_BIGNUM;
228imemo_count_value(
struct MEMO *memo)
230 if (memo->flags & COUNT_BIGNUM) {
231 return memo->u3.value;
241 struct MEMO *memo = MEMO_CAST(memop);
246 imemo_count_up(memo);
254 struct MEMO *memo = MEMO_CAST(memop);
257 imemo_count_up(memo);
265 struct MEMO *memo = MEMO_CAST(memop);
267 imemo_count_up(memo);
315 rb_warn(
"given block not used");
320 memo = MEMO_NEW(item, 0, 0);
322 return imemo_count_value(memo);
330 if (
RTEST(enum_yield(argc, i))) {
331 struct MEMO *memo = MEMO_CAST(memop);
332 MEMO_V1_SET(memo, i);
368 memo = MEMO_NEW(
Qundef, 0, 0);
373 if (!
NIL_P(if_none)) {
374 return rb_funcallv(if_none, id_call, 0, 0);
382 struct MEMO *memo = MEMO_CAST(memop);
387 MEMO_V1_SET(memo, imemo_count_value(memo));
390 imemo_count_up(memo);
397 struct MEMO *memo = MEMO_CAST(memop);
400 MEMO_V1_SET(memo, imemo_count_value(memo));
403 imemo_count_up(memo);
432enum_find_index(
int argc,
VALUE *argv,
VALUE obj)
440 func = find_index_iter_i;
445 rb_warn(
"given block not used");
450 memo = MEMO_NEW(
Qnil, condition_value, 0);
460 if (
RTEST(enum_yield(argc, i))) {
469 return rb_check_funcall_default(self, id_size, 0, 0,
Qnil);
473limit_by_enum_size(
VALUE obj,
long n)
479 return ((
unsigned long)n > limit) ? (long)limit : n;
483enum_size_over_p(
VALUE obj,
long n)
487 return ((
unsigned long)n >
FIX2ULONG(size));
509enum_find_all(
VALUE obj)
550enum_filter_map(
VALUE obj)
568 if (!
RTEST(enum_yield(argc, i))) {
593enum_reject(
VALUE obj)
616 rb_ary_push(ary, rb_enum_values_pack(argc, argv));
638enum_collect(
VALUE obj)
641 int min_argc, max_argc;
646 min_argc = rb_block_min_max_arity(&max_argc);
647 rb_lambda_call(obj, id_each, 0, 0, collect_i, min_argc, max_argc, ary);
658 tmp = rb_check_array_type(i);
664 rb_ary_concat(ary, tmp);
689enum_flat_map(
VALUE obj)
714 VALUE ary = rb_ary_new();
731 return enum_hashify_into(obj, argc, argv, iter, rb_hash_new());
738 return rb_hash_set_pair(hash, i);
772 return enum_hashify(obj, argc, argv, iter);
778 struct MEMO *memo = MEMO_CAST(p);
782 if (UNDEF_P(memo->v1)) {
783 MEMO_V1_SET(memo, i);
794 struct MEMO *memo = MEMO_CAST(p);
799 if (UNDEF_P(memo->v1)) {
800 MEMO_V1_SET(memo, i);
802 else if (
SYMBOL_P(name = memo->u3.value)) {
810 MEMO_V1_SET(memo, rb_f_send(numberof(args), args, memo->v1));
823 return UNDEF_P(init) ?
Qnil : init;
839 rb_method_basic_definition_p(
rb_cInteger, idPLUS) &&
851 else if (RB_BIGNUM_TYPE_P(e))
852 v = rb_big_plus(e, v);
1031 op =
id ?
ID2SYM(
id) : init;
1045 if (iter == inject_op_i &&
1048 rb_method_basic_definition_p(
CLASS_OF(obj), id_each)) {
1049 return ary_inject_op(obj, init, op);
1052 memo = MEMO_NEW(init,
Qnil, op);
1054 if (UNDEF_P(memo->v1))
return Qnil;
1061 struct MEMO *memo = MEMO_CAST(arys);
1065 if (
RTEST(enum_yield(argc, i))) {
1071 rb_ary_push(ary, i);
1104enum_partition(
VALUE obj)
1110 memo = MEMO_NEW(rb_ary_new(), rb_ary_new(), 0);
1113 return rb_assoc_new(memo->v1, memo->v2);
1124 group = enum_yield(argc, i);
1125 values = rb_hash_aref(hash, group);
1128 rb_hash_aset(hash, group, values);
1131 rb_ary_push(values, i);
1159enum_group_by(
VALUE obj)
1163 return enum_hashify(obj, 0, 0, group_by_i);
1167tally_up(st_data_t *group, st_data_t *value, st_data_t arg,
int existing)
1175 tally +=
INT2FIX(1) & ~FIXNUM_FLAG;
1179 tally = rb_big_plus(tally,
INT2FIX(1));
1182 *value = (st_data_t)tally;
1190 rb_hash_stlike_update(hash, group, tally_up, (st_data_t)hash);
1198 rb_enum_tally_up(hash, i);
1235 hash = rb_to_hash_type(argv[0]);
1239 hash = rb_hash_new();
1242 return enum_hashify_into(obj, 0, 0, tally_i, hash);
1249 struct MEMO *memo = MEMO_CAST(params);
1252 MEMO_V1_SET(memo, i);
1291 return enum_take(obj, argv[0]);
1294 memo = MEMO_NEW(
Qnil, 0, 0);
1335 return rb_ary_sort_bang(enum_to_a(0, 0, obj));
1338#define SORT_BY_BUFSIZE 16
1349 VALUE ary = data->ary;
1354 v = enum_yield(argc, i);
1356 if (
RBASIC(ary)->klass) {
1359 if (
RARRAY_LEN(data->buf) != SORT_BY_BUFSIZE*2) {
1366 if (data->n == SORT_BY_BUFSIZE) {
1367 rb_ary_concat(ary, data->buf);
1374sort_by_cmp(
const void *ap,
const void *bp,
void *data)
1380 if (
RBASIC(ary)->klass) {
1387 return OPTIMIZED_CMP(a, b);
1473enum_sort_by(
VALUE obj)
1488 RBASIC_CLEAR_CLASS(ary);
1489 buf = rb_ary_hidden_new(SORT_BY_BUFSIZE*2);
1490 rb_ary_store(buf, SORT_BY_BUFSIZE*2-1,
Qnil);
1491 memo = MEMO_NEW(0, 0, 0);
1500 rb_ary_resize(buf, data->n*2);
1501 rb_ary_concat(ary, buf);
1506 sort_by_cmp, (
void *)ary));
1508 if (
RBASIC(ary)->klass) {
1520#define ENUMFUNC(name) argc ? name##_eqq : rb_block_given_p() ? name##_iter_i : name##_i
1522#define MEMO_ENUM_NEW(v1) (rb_check_arity(argc, 0, 1), MEMO_NEW((v1), (argc ? *argv : 0), 0))
1524#define DEFINE_ENUMFUNCS(name) \
1525static VALUE enum_##name##_func(VALUE result, struct MEMO *memo); \
1528name##_i(RB_BLOCK_CALL_FUNC_ARGLIST(i, memo)) \
1530 return enum_##name##_func(rb_enum_values_pack(argc, argv), MEMO_CAST(memo)); \
1534name##_iter_i(RB_BLOCK_CALL_FUNC_ARGLIST(i, memo)) \
1536 return enum_##name##_func(rb_yield_values2(argc, argv), MEMO_CAST(memo)); \
1540name##_eqq(RB_BLOCK_CALL_FUNC_ARGLIST(i, memo)) \
1542 ENUM_WANT_SVALUE(); \
1543 return enum_##name##_func(rb_funcallv(MEMO_CAST(memo)->v2, id_eqq, 1, &i), MEMO_CAST(memo)); \
1547enum_##name##_func(VALUE result, struct MEMO *memo)
1549#define WARN_UNUSED_BLOCK(argc) do { \
1550 if ((argc) > 0 && rb_block_given_p()) { \
1551 rb_warn("given block not used"); \
1555DEFINE_ENUMFUNCS(all)
1557 if (!
RTEST(result)) {
1558 MEMO_V1_SET(memo,
Qfalse);
1610 struct MEMO *memo = MEMO_ENUM_NEW(
Qtrue);
1611 WARN_UNUSED_BLOCK(argc);
1616DEFINE_ENUMFUNCS(any)
1618 if (
RTEST(result)) {
1619 MEMO_V1_SET(memo,
Qtrue);
1671 WARN_UNUSED_BLOCK(argc);
1676DEFINE_ENUMFUNCS(one)
1678 if (
RTEST(result)) {
1679 if (UNDEF_P(memo->v1)) {
1680 MEMO_V1_SET(memo,
Qtrue);
1682 else if (memo->v1 ==
Qtrue) {
1683 MEMO_V1_SET(memo,
Qfalse);
1696 int (*cmpfunc)(
const void *,
const void *,
void *);
1704 if (
RBASIC(data->buf)->klass) {
1706 data->rev ?
"max" :
"min",
1707 data->by ?
"_by" :
"");
1713nmin_cmp(
const void *ap,
const void *bp,
void *_data)
1717#define rb_cmpint(cmp, a, b) rb_cmpint(cmpint_reenter_check(data, (cmp)), a, b)
1718 return OPTIMIZED_CMP(a, b);
1723nmin_block_cmp(
const void *ap,
const void *bp,
void *_data)
1728 cmpint_reenter_check(data, cmp);
1729 return rb_cmpint(cmp, a, b);
1745 if (data->curlen <= data->n)
1750 eltsize = data->by ? 2 : 1;
1751 numelts = data->curlen;
1756#define GETPTR(i) (beg+(i)*eltsize)
1758#define SWAP(i, j) do { \
1760 memcpy(tmp, GETPTR(i), sizeof(VALUE)*eltsize); \
1761 memcpy(GETPTR(i), GETPTR(j), sizeof(VALUE)*eltsize); \
1762 memcpy(GETPTR(j), tmp, sizeof(VALUE)*eltsize); \
1766 long pivot_index = left + (right-left)/2;
1767 long num_pivots = 1;
1769 SWAP(pivot_index, right);
1770 pivot_index = right;
1774 while (i <= right-num_pivots) {
1775 int c = data->cmpfunc(GETPTR(i), GETPTR(pivot_index), data);
1779 SWAP(i, right-num_pivots);
1784 SWAP(i, store_index);
1790 for (i = right; right-num_pivots < i; i--) {
1797 if (store_index <= n && n <= store_index+num_pivots)
1800 if (n < store_index) {
1801 right = store_index-1;
1804 left = store_index+num_pivots;
1810 data->limit =
RARRAY_AREF(data->buf, store_index*eltsize);
1811 data->curlen = data->n;
1812 rb_ary_resize(data->buf, data->n * eltsize);
1824 cmpv = enum_yield(argc, i);
1828 if (!UNDEF_P(data->limit)) {
1829 int c = data->cmpfunc(&cmpv, &data->limit, data);
1837 rb_ary_push(data->buf, cmpv);
1838 rb_ary_push(data->buf, i);
1842 if (data->curlen == data->bufmax) {
1850rb_nmin_run(
VALUE obj,
VALUE num,
int by,
int rev,
int ary)
1860 if (LONG_MAX/4/(by ? 2 : 1) < data.n)
1862 data.bufmax = data.n * 4;
1864 data.buf = rb_ary_hidden_new(data.bufmax * (by ? 2 : 1));
1866 data.cmpfunc = by ? nmin_cmp :
1890 data.cmpfunc, (
void *)&data);
1900 data.cmpfunc, (
void *)&data);
1904 rb_ary_reverse(result);
1960 WARN_UNUSED_BLOCK(argc);
1963 if (UNDEF_P(result))
return Qfalse;
1967DEFINE_ENUMFUNCS(none)
1969 if (
RTEST(result)) {
1970 MEMO_V1_SET(memo,
Qfalse);
2019 struct MEMO *memo = MEMO_ENUM_NEW(
Qtrue);
2021 WARN_UNUSED_BLOCK(argc);
2033 struct min_t *memo = MEMO_FOR(
struct min_t, args);
2037 if (UNDEF_P(memo->min)) {
2041 if (OPTIMIZED_CMP(i, memo->min) < 0) {
2052 struct min_t *memo = MEMO_FOR(
struct min_t, args);
2056 if (UNDEF_P(memo->min)) {
2061 if (rb_cmpint(cmp, i, memo->min) < 0) {
2130 struct min_t *m = NEW_MEMO_FOR(
struct min_t, memo);
2135 return rb_nmin_run(obj, num, 0, 0, 0);
2145 if (UNDEF_P(result))
return Qnil;
2156 struct max_t *memo = MEMO_FOR(
struct max_t, args);
2160 if (UNDEF_P(memo->max)) {
2164 if (OPTIMIZED_CMP(i, memo->max) > 0) {
2174 struct max_t *memo = MEMO_FOR(
struct max_t, args);
2179 if (UNDEF_P(memo->max)) {
2184 if (rb_cmpint(cmp, i, memo->max) > 0) {
2252 struct max_t *m = NEW_MEMO_FOR(
struct max_t, memo);
2257 return rb_nmin_run(obj, num, 0, 1, 0);
2267 if (UNDEF_P(result))
return Qnil;
2282 if (UNDEF_P(memo->min)) {
2287 n = OPTIMIZED_CMP(i, memo->min);
2291 n = OPTIMIZED_CMP(j, memo->max);
2307 if (UNDEF_P(memo->last)) {
2314 n = OPTIMIZED_CMP(j, i);
2324 minmax_i_update(i, j, memo);
2334 if (UNDEF_P(memo->min)) {
2359 if (UNDEF_P(memo->last)) {
2376 minmax_ii_update(i, j, memo);
2413enum_minmax(
VALUE obj)
2422 if (!UNDEF_P(m->last))
2423 minmax_ii_update(m->last, m->last, m);
2427 if (!UNDEF_P(m->last))
2428 minmax_i_update(m->last, m->last, m);
2430 if (!UNDEF_P(m->min)) {
2431 return rb_assoc_new(m->min, m->max);
2439 struct MEMO *memo = MEMO_CAST(args);
2444 v = enum_yield(argc, i);
2445 if (UNDEF_P(memo->v1)) {
2446 MEMO_V1_SET(memo, v);
2447 MEMO_V2_SET(memo, i);
2449 else if (OPTIMIZED_CMP(v, memo->v1) < 0) {
2450 MEMO_V1_SET(memo, v);
2451 MEMO_V2_SET(memo, i);
2502 if (argc && !
NIL_P(num = argv[0]))
2503 return rb_nmin_run(obj, num, 1, 0, 0);
2513 struct MEMO *memo = MEMO_CAST(args);
2518 v = enum_yield(argc, i);
2519 if (UNDEF_P(memo->v1)) {
2520 MEMO_V1_SET(memo, v);
2521 MEMO_V2_SET(memo, i);
2523 else if (OPTIMIZED_CMP(v, memo->v1) > 0) {
2524 MEMO_V1_SET(memo, v);
2525 MEMO_V2_SET(memo, i);
2576 if (argc && !
NIL_P(num = argv[0]))
2577 return rb_nmin_run(obj, num, 1, 1, 0);
2596 if (UNDEF_P(memo->min_bv)) {
2603 if (OPTIMIZED_CMP(v1, memo->min_bv) < 0) {
2607 if (OPTIMIZED_CMP(v2, memo->max_bv) > 0) {
2623 vi = enum_yield(argc, i);
2625 if (UNDEF_P(memo->last_bv)) {
2634 n = OPTIMIZED_CMP(vj, vi);
2649 minmax_by_i_update(vi, vj, i, j, memo);
2678enum_minmax_by(
VALUE obj)
2692 if (!UNDEF_P(m->last_bv))
2693 minmax_by_i_update(m->last_bv, m->last_bv, m->last, m->last, m);
2695 return rb_assoc_new(m->min, m->max);
2701 struct MEMO *memo = MEMO_CAST(args);
2703 if (
rb_equal(rb_enum_values_pack(argc, argv), memo->v1)) {
2704 MEMO_V2_SET(memo,
Qtrue);
2732 struct MEMO *memo = MEMO_NEW(val,
Qfalse, 0);
2741 struct MEMO *m = MEMO_CAST(memo);
2742 VALUE n = imemo_count_value(m);
2776enum_each_with_index(
int argc,
VALUE *argv,
VALUE obj)
2782 memo = MEMO_NEW(0, 0, 0);
2815enum_reverse_each(
int argc,
VALUE *argv,
VALUE obj)
2822 ary = enum_to_a(argc, argv, obj);
2842 enum_yield(argc, i);
2885enum_each_entry(
int argc,
VALUE *argv,
VALUE obj)
2893add_int(
VALUE x,
long n)
2897 return rb_funcallv(x,
'+', 1, &y);
2901div_int(
VALUE x,
long n)
2905 return rb_funcallv(x, id_div, 1, &y);
2908#define dont_recycle_block_arg(arity) ((arity) == 1 || (arity) < 0)
2913 struct MEMO *memo = MEMO_CAST(m);
2914 VALUE ary = memo->v1;
2916 long size = memo->u3.cnt;
2919 rb_ary_push(ary, i);
2944 size = enum_size(obj, 0, 0);
2950 n = add_int(size, slice_size-1);
2951 return div_int(n, slice_size);
2984 size = limit_by_enum_size(obj, size);
2986 arity = rb_block_arity();
2987 memo = MEMO_NEW(ary, dont_recycle_block_arg(arity), size);
2998 struct MEMO *memo = MEMO_CAST(args);
2999 VALUE ary = memo->v1;
3001 long size = memo->u3.cnt;
3007 rb_ary_push(ary, i);
3010 ary = rb_ary_dup(ary);
3025 size = enum_size(obj, 0, 0);
3028 n = add_int(size, 1 - cons_size);
3029 return (OPTIMIZED_CMP(n, zero) == -1) ? zero : n;
3061 arity = rb_block_arity();
3062 if (enum_size_over_p(obj, size))
return obj;
3063 memo = MEMO_NEW(
rb_ary_new2(size), dont_recycle_block_arg(arity), size);
3098 rb_block_call(obj, id_each, 0, 0, each_with_object_i, memo);
3106 struct MEMO *memo = (
struct MEMO *)memoval;
3107 VALUE result = memo->v1;
3108 VALUE args = memo->v2;
3109 long n = memo->u3.cnt++;
3114 rb_ary_store(tmp, 0, rb_enum_values_pack(argc, argv));
3119 rb_ary_push(tmp,
Qnil);
3125 if (
NIL_P(result)) {
3126 enum_yield_array(tmp);
3129 rb_ary_push(result, tmp);
3141 return v[0] = rb_funcallv(v[1], id_next, 0, 0);
3154 struct MEMO *memo = (
struct MEMO *)memoval;
3155 VALUE result = memo->v1;
3156 VALUE args = memo->v2;
3161 rb_ary_store(tmp, 0, rb_enum_values_pack(argc, argv));
3164 rb_ary_push(tmp,
Qnil);
3171 if (UNDEF_P(v[0])) {
3175 rb_ary_push(tmp, v[0]);
3178 if (
NIL_P(result)) {
3179 enum_yield_array(tmp);
3182 rb_ary_push(result, tmp);
3268 for (i=0; i<argc; i++) {
3269 VALUE ary = rb_check_array_type(argv[i]);
3277 static const VALUE sym_each = STATIC_ID2SYM(id_each);
3279 for (i=0; i<argc; i++) {
3284 argv[i] = rb_funcallv(argv[i], conv, 1, &sym_each);
3288 result = rb_ary_new();
3292 memo = MEMO_NEW(result, args, 0);
3301 struct MEMO *memo = MEMO_CAST(args);
3302 rb_ary_push(memo->v1, rb_enum_values_pack(argc, argv));
3335 memo = MEMO_NEW(result, 0, len);
3345 rb_ary_push(ary, rb_enum_values_pack(argc, argv));
3369enum_take_while(
VALUE obj)
3382 struct MEMO *memo = MEMO_CAST(args);
3383 if (memo->u3.cnt == 0) {
3384 rb_ary_push(memo->v1, rb_enum_values_pack(argc, argv));
3422 result = rb_ary_new();
3423 memo = MEMO_NEW(result, 0, len);
3432 struct MEMO *memo = MEMO_CAST(args);
3435 if (!memo->u3.state && !
RTEST(enum_yield(argc, i))) {
3436 memo->u3.state = TRUE;
3438 if (memo->u3.state) {
3439 rb_ary_push(memo->v1, i);
3464enum_drop_while(
VALUE obj)
3470 result = rb_ary_new();
3471 memo = MEMO_NEW(result, 0, FALSE);
3481 rb_ary_push(ary, argc > 1 ? i : rb_ary_new_from_values(argc, argv));
3482 enum_yield(argc, i);
3498 size = enum_size(self, args, 0);
3499 if (
NIL_P(size) || FIXNUM_ZERO_P(size))
return size;
3502 if (mul <= 0)
return INT2FIX(0);
3504 return rb_funcallv(size,
'*', 1, &n);
3544 if (!argc ||
NIL_P(nv = argv[0])) {
3549 if (n <= 0)
return Qnil;
3552 RBASIC_CLEAR_CLASS(ary);
3555 if (len == 0)
return Qnil;
3556 while (n < 0 || 0 < --n) {
3557 for (i=0; i<len; i++) {
3581 v = rb_funcallv(argp->categorize, id_call, 1, &i);
3584 if (!
NIL_P(argp->prev_value)) {
3585 s = rb_assoc_new(argp->prev_value, argp->prev_elts);
3586 rb_funcallv(argp->yielder, id_lshift, 1, &s);
3587 argp->prev_value = argp->prev_elts =
Qnil;
3590 rb_funcallv(argp->yielder, id_lshift, 1, &v);
3592 else if (
NIL_P(v) || v == separator) {
3593 if (!
NIL_P(argp->prev_value)) {
3594 v = rb_assoc_new(argp->prev_value, argp->prev_elts);
3595 rb_funcallv(argp->yielder, id_lshift, 1, &v);
3596 argp->prev_value = argp->prev_elts =
Qnil;
3603 if (
NIL_P(argp->prev_value)) {
3604 argp->prev_value = v;
3608 if (
rb_equal(argp->prev_value, v)) {
3609 rb_ary_push(argp->prev_elts, i);
3612 s = rb_assoc_new(argp->prev_value, argp->prev_elts);
3613 rb_funcallv(argp->yielder, id_lshift, 1, &s);
3614 argp->prev_value = v;
3631 memo->prev_value =
Qnil;
3632 memo->prev_elts =
Qnil;
3637 if (!
NIL_P(memo->prev_elts)) {
3638 arg = rb_assoc_new(memo->prev_value, memo->prev_elts);
3639 rb_funcallv(memo->yielder, id_lshift, 1, &arg);
3749enum_chunk(
VALUE enumerable)
3778 if (!
NIL_P(argp->sep_pat))
3779 header_p = rb_funcallv(argp->sep_pat, id_eqq, 1, &i);
3781 header_p = rb_funcallv(argp->sep_pred, id_call, 1, &i);
3782 if (
RTEST(header_p)) {
3783 if (!
NIL_P(argp->prev_elts))
3784 rb_funcallv(argp->yielder, id_lshift, 1, &argp->prev_elts);
3788 if (
NIL_P(argp->prev_elts))
3791 rb_ary_push(argp->prev_elts, i);
3807 memo->prev_elts =
Qnil;
3810 rb_block_call(enumerable, id_each, 0, 0, slicebefore_ii, arg);
3812 if (!
NIL_P(memo->prev_elts))
3813 rb_funcallv(memo->yielder, id_lshift, 1, &memo->prev_elts);
3978enum_slice_before(
int argc,
VALUE *argv,
VALUE enumerable)
3984 rb_error_arity(argc, 0, 0);
4010#define UPDATE_MEMO ((void)(memo = MEMO_FOR(struct sliceafter_arg, _memo)))
4017 if (
NIL_P(memo->prev_elts)) {
4021 rb_ary_push(memo->prev_elts, i);
4024 if (
NIL_P(memo->pred)) {
4025 split_p =
RTEST(rb_funcallv(memo->pat, id_eqq, 1, &i));
4029 split_p =
RTEST(rb_funcallv(memo->pred, id_call, 1, &i));
4034 rb_funcallv(memo->yielder, id_lshift, 1, &memo->prev_elts);
4036 memo->prev_elts =
Qnil;
4053 memo->prev_elts =
Qnil;
4056 rb_block_call(enumerable, id_each, 0, 0, sliceafter_ii, arg);
4058 if (!
NIL_P(memo->prev_elts))
4059 rb_funcallv(memo->yielder, id_lshift, 1, &memo->prev_elts);
4100enum_slice_after(
int argc,
VALUE *argv,
VALUE enumerable)
4134#define UPDATE_MEMO ((void)(memo = MEMO_FOR(struct slicewhen_arg, _memo)))
4141 if (UNDEF_P(memo->prev_elt)) {
4148 args[0] = memo->prev_elt;
4150 split_p =
RTEST(rb_funcallv(memo->pred, id_call, 2, args));
4157 rb_funcallv(memo->yielder, id_lshift, 1, &memo->prev_elts);
4162 rb_ary_push(memo->prev_elts, i);
4183 memo->prev_elts =
Qnil;
4187 rb_block_call(enumerable, id_each, 0, 0, slicewhen_ii, arg);
4189 if (!
NIL_P(memo->prev_elts))
4190 rb_funcallv(memo->yielder, id_lshift, 1, &memo->prev_elts);
4257enum_slice_when(
VALUE enumerable)
4323enum_chunk_while(
VALUE enumerable)
4351 memo->v = rb_fix_plus(
LONG2FIX(memo->n), memo->v);
4354 switch (
TYPE(memo->r)) {
4355 case T_RATIONAL: memo->v = rb_rational_plus(memo->r, memo->v);
break;
4367 memo->v = rb_big_plus(
LONG2NUM(memo->n), memo->v);
4375 memo->v = rb_big_plus(i, memo->v);
4381 if (UNDEF_P(memo->r)) {
4385 memo->r = rb_rational_plus(memo->r, i);
4392 memo->v = rb_funcallv(memo->v, idPLUS, 1, &i);
4407 case T_BIGNUM: x = rb_big2dbl(i);
break;
4411 memo->float_value = 0;
4412 sum_iter_some_value(i, memo);
4421 else if (! isfinite(x)) {
4422 if (isinf(x) && isinf(f) && signbit(x) != signbit(f)) {
4430 else if (isinf(f)) {
4437 if (fabs(f) >= fabs(x)) {
4452 assert(memo != NULL);
4453 if (memo->block_given) {
4457 if (memo->float_value) {
4458 sum_iter_Kahan_Babuska(i, memo);
4460 else switch (
TYPE(memo->v)) {
4461 default: sum_iter_some_value(i, memo);
return;
4462 case T_FLOAT: sum_iter_Kahan_Babuska(i, memo);
return;
4467 case T_FIXNUM: sum_iter_fixnum(i, memo);
return;
4468 case T_BIGNUM: sum_iter_bignum(i, memo);
return;
4469 case T_RATIONAL: sum_iter_rational(i, memo);
return;
4471 sum_iter_normalize_memo(memo);
4474 memo->float_value = 1;
4475 sum_iter_Kahan_Babuska(i, memo);
4478 sum_iter_normalize_memo(memo);
4479 sum_iter_some_value(i, memo);
4496 sum_iter(rb_assoc_new(key, value), (
struct enum_sum_memo *) arg);
4504 assert(memo != NULL);
4516 end = rb_big_minus(end,
LONG2FIX(1));
4519 if (rb_int_ge(end, beg)) {
4521 a = rb_int_plus(rb_int_minus(end, beg),
LONG2FIX(1));
4522 a = rb_int_mul(a, rb_int_plus(end, beg));
4524 return rb_int_plus(init, a);
4583 if (!memo.block_given && !memo.float_value &&
4584 (
FIXNUM_P(beg) || RB_BIGNUM_TYPE_P(beg)) &&
4585 (
FIXNUM_P(end) || RB_BIGNUM_TYPE_P(end))) {
4586 return int_range_sum(beg, end, excl, memo.v);
4591 rb_method_basic_definition_p(
CLASS_OF(obj), id_each))
4592 hash_sum(obj, &memo);
4596 if (memo.float_value) {
4597 return DBL2NUM(memo.f + memo.c);
4601 memo.v = rb_fix_plus(
LONG2FIX(memo.n), memo.v);
4602 if (!UNDEF_P(memo.r)) {
4603 memo.v = rb_rational_plus(memo.r, memo.v);
4613 rb_hash_add_new_element(hash, i, i);
4655 ret = rb_hash_values(hash);
4656 rb_hash_clear(hash);
4666 rb_ary_push(ary, i);
4683enum_compact(
VALUE obj)
4868Init_Enumerable(
void)
4940 id_slicebefore_enumerable =
rb_intern_const(
"slicebefore_enumerable");
#define rb_define_method(klass, mid, func, arity)
Defines klass#mid.
VALUE rb_define_module(const char *name)
Defines a top-level module.
int rb_scan_args(int argc, const VALUE *argv, const char *fmt,...)
Retrieves argument from argc and argv to given VALUE references according to the format string.
int rb_block_given_p(void)
Determines if the current method is given a block.
#define TYPE(_)
Old name of rb_type.
#define RB_INTEGER_TYPE_P
Old name of rb_integer_type_p.
#define RFLOAT_VALUE
Old name of rb_float_value.
#define Qundef
Old name of RUBY_Qundef.
#define INT2FIX
Old name of RB_INT2FIX.
#define UNREACHABLE
Old name of RBIMPL_UNREACHABLE.
#define T_FLOAT
Old name of RUBY_T_FLOAT.
#define ID2SYM
Old name of RB_ID2SYM.
#define T_BIGNUM
Old name of RUBY_T_BIGNUM.
#define SPECIAL_CONST_P
Old name of RB_SPECIAL_CONST_P.
#define ULONG2NUM
Old name of RB_ULONG2NUM.
#define T_FIXNUM
Old name of RUBY_T_FIXNUM.
#define UNREACHABLE_RETURN
Old name of RBIMPL_UNREACHABLE_RETURN.
#define SYM2ID
Old name of RB_SYM2ID.
#define CLASS_OF
Old name of rb_class_of.
#define rb_ary_new4
Old name of rb_ary_new_from_values.
#define FIXABLE
Old name of RB_FIXABLE.
#define LONG2FIX
Old name of RB_INT2FIX.
#define FIX2ULONG
Old name of RB_FIX2ULONG.
#define T_RATIONAL
Old name of RUBY_T_RATIONAL.
#define T_HASH
Old name of RUBY_T_HASH.
#define NUM2DBL
Old name of rb_num2dbl.
#define rb_ary_new3
Old name of rb_ary_new_from_args.
#define LONG2NUM
Old name of RB_LONG2NUM.
#define T_UNDEF
Old name of RUBY_T_UNDEF.
#define Qtrue
Old name of RUBY_Qtrue.
#define FIXNUM_MAX
Old name of RUBY_FIXNUM_MAX.
#define Qnil
Old name of RUBY_Qnil.
#define Qfalse
Old name of RUBY_Qfalse.
#define FIX2LONG
Old name of RB_FIX2LONG.
#define T_ARRAY
Old name of RUBY_T_ARRAY.
#define NIL_P
Old name of RB_NIL_P.
#define DBL2NUM
Old name of rb_float_new.
#define NUM2LONG
Old name of RB_NUM2LONG.
#define FIXNUM_P
Old name of RB_FIXNUM_P.
#define CONST_ID
Old name of RUBY_CONST_ID.
#define rb_ary_new2
Old name of rb_ary_new_capa.
#define SYMBOL_P
Old name of RB_SYMBOL_P.
#define T_REGEXP
Old name of RUBY_T_REGEXP.
void rb_raise(VALUE exc, const char *fmt,...)
Exception entry point.
void rb_iter_break(void)
Breaks from a block.
VALUE rb_eTypeError
TypeError exception.
VALUE rb_eRuntimeError
RuntimeError exception.
VALUE rb_eStopIteration
StopIteration exception.
void rb_warn(const char *fmt,...)
Identical to rb_warning(), except it reports always regardless of runtime -W flag.
VALUE rb_eArgError
ArgumentError exception.
void rb_warning(const char *fmt,...)
Issues a warning.
VALUE rb_cArray
Array class.
VALUE rb_obj_alloc(VALUE klass)
Allocates an instance of the given class.
VALUE rb_mEnumerable
Enumerable module.
VALUE rb_cEnumerator
Enumerator class.
VALUE rb_cInteger
Integer class.
VALUE rb_obj_hide(VALUE obj)
Make the object invisible from Ruby code.
VALUE rb_obj_class(VALUE obj)
Queries the class of an object.
double rb_num2dbl(VALUE num)
Converts an instance of rb_cNumeric into C's double.
VALUE rb_equal(VALUE lhs, VALUE rhs)
This function is an optimised version of calling #==.
#define RB_OBJ_WRITTEN(old, oldv, young)
Identical to RB_OBJ_WRITE(), except it doesn't write any values, but only a WB declaration.
#define RB_OBJ_WRITE(old, slot, young)
Declaration of a "back" pointer.
VALUE rb_funcall(VALUE recv, ID mid, int n,...)
Calls a method.
VALUE rb_funcallv_public(VALUE recv, ID mid, int argc, const VALUE *argv)
Identical to rb_funcallv(), except it only takes public methods into account.
#define RETURN_SIZED_ENUMERATOR(obj, argc, argv, size_fn)
This roughly resembles return enum_for(__callee__) unless block_given?.
#define RETURN_ENUMERATOR(obj, argc, argv)
Identical to RETURN_SIZED_ENUMERATOR(), except its size is unknown.
#define rb_check_frozen
Just another name of rb_check_frozen.
static int rb_check_arity(int argc, int min, int max)
Ensures that the passed integer is in the passed range.
VALUE rb_block_proc(void)
Constructs a Proc object from implicitly passed components.
int rb_range_values(VALUE range, VALUE *begp, VALUE *endp, int *exclp)
Deconstructs a range into its components.
VALUE rb_check_string_type(VALUE obj)
Try converting an object to its stringised representation using its to_str method,...
VALUE rb_attr_get(VALUE obj, ID name)
Identical to rb_ivar_get()
VALUE rb_ivar_set(VALUE obj, ID name, VALUE val)
Identical to rb_iv_set(), except it accepts the name as an ID instead of a C string.
VALUE rb_ivar_get(VALUE obj, ID name)
Identical to rb_iv_get(), except it accepts the name as an ID instead of a C string.
int rb_respond_to(VALUE obj, ID mid)
Queries if the object responds to the method.
VALUE rb_check_funcall(VALUE recv, ID mid, int argc, const VALUE *argv)
Identical to rb_funcallv(), except it returns RUBY_Qundef instead of raising rb_eNoMethodError.
int rb_obj_respond_to(VALUE obj, ID mid, int private_p)
Identical to rb_respond_to(), except it additionally takes the visibility parameter.
static ID rb_intern_const(const char *str)
This is a "tiny optimisation" over rb_intern().
ID rb_check_id(volatile VALUE *namep)
Detects if the given name is already interned or not.
VALUE rb_sym2str(VALUE id)
Identical to rb_id2str(), except it takes an instance of rb_cSymbol rather than an ID.
void ruby_qsort(void *, const size_t, const size_t, int(*)(const void *, const void *, void *), void *)
Reentrant implementation of quick sort.
#define RB_BLOCK_CALL_FUNC_ARGLIST(yielded_arg, callback_arg)
Shim for block function parameters.
VALUE rb_yield_values(int n,...)
Identical to rb_yield(), except it takes variadic number of parameters and pass them to the block.
VALUE rb_yield_values2(int n, const VALUE *argv)
Identical to rb_yield_values(), except it takes the parameters as a C array instead of variadic argum...
VALUE rb_yield(VALUE val)
Yields the block.
rb_block_call_func * rb_block_call_func_t
Shorthand type that represents an iterator-written-in-C function pointer.
VALUE rb_block_call_func(RB_BLOCK_CALL_FUNC_ARGLIST(yielded_arg, callback_arg))
This is the type of a function that the interpreter expect for C-backended blocks.
VALUE rb_block_call_kw(VALUE obj, ID mid, int argc, const VALUE *argv, rb_block_call_func_t proc, VALUE data2, int kw_splat)
Identical to rb_funcallv_kw(), except it additionally passes a function as a block.
#define RB_GC_GUARD(v)
Prevents premature destruction of local objects.
VALUE rb_block_call(VALUE obj, ID mid, int argc, const VALUE *argv, rb_block_call_func_t proc, VALUE data2)
Identical to rb_funcallv(), except it additionally passes a function as a block.
void rb_hash_foreach(VALUE hash, int (*func)(VALUE key, VALUE value, VALUE arg), VALUE arg)
Iterates over the given hash, calling func for each key/value pair.
VALUE rb_rescue2(VALUE (*b_proc)(VALUE), VALUE data1, VALUE (*r_proc)(VALUE, VALUE), VALUE data2,...)
An equivalent of a rescue clause: runs b_proc, invoking r_proc if any of the trailing exception classes is raised.
#define RARRAY_LEN
Just another name of rb_array_len.
static void RARRAY_ASET(VALUE ary, long i, VALUE v)
Assigns an object in an array.
#define RARRAY_PTR_USE(ary, ptr_name, expr)
Declares a section of code where raw pointers are used.
static VALUE * RARRAY_PTR(VALUE ary)
Wild use of a C pointer.
#define RARRAY_AREF(a, i)
#define RBASIC(obj)
Convenient casting macro.
static char * RSTRING_PTR(VALUE str)
Queries the contents pointer of the string.
#define RB_PASS_CALLED_KEYWORDS
Pass keywords if current method is called with keywords, useful for argument delegation.
#define RTEST
This is an old name of RB_TEST.
#define _(args)
This was a transition path from K&R to ANSI.
uintptr_t ID
Type that represents a Ruby identifier such as a variable name.
uintptr_t VALUE
Type that represents a Ruby object.
static bool RB_FLOAT_TYPE_P(VALUE obj)
Queries if the object is an instance of rb_cFloat.
static void Check_Type(VALUE v, enum ruby_value_type t)
Identical to RB_TYPE_P(), except it raises exceptions on predication failure.
static bool RB_TYPE_P(VALUE obj, enum ruby_value_type t)
Queries if the given object is of given type.